hexsha
stringlengths
40
40
size
int64
6
14.9M
ext
stringclasses
1 value
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
6
260
max_stars_repo_name
stringlengths
6
119
max_stars_repo_head_hexsha
stringlengths
40
41
max_stars_repo_licenses
list
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
6
260
max_issues_repo_name
stringlengths
6
119
max_issues_repo_head_hexsha
stringlengths
40
41
max_issues_repo_licenses
list
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
6
260
max_forks_repo_name
stringlengths
6
119
max_forks_repo_head_hexsha
stringlengths
40
41
max_forks_repo_licenses
list
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
avg_line_length
float64
2
1.04M
max_line_length
int64
2
11.2M
alphanum_fraction
float64
0
1
cells
list
cell_types
list
cell_type_groups
list
4aa262e95e40c8352207c0e692dd40e36f39ff2a
576,677
ipynb
Jupyter Notebook
notebooks/utils/experiment_logger.ipynb
abhishekunique/RND-ashwin
f8bcf3c593df2dacc0efba0875533be71ccb5011
[ "MIT" ]
null
null
null
notebooks/utils/experiment_logger.ipynb
abhishekunique/RND-ashwin
f8bcf3c593df2dacc0efba0875533be71ccb5011
[ "MIT" ]
7
2020-09-25T22:41:46.000Z
2022-03-12T00:37:25.000Z
notebooks/utils/experiment_logger.ipynb
abhishekunique/RND-ashwin
f8bcf3c593df2dacc0efba0875533be71ccb5011
[ "MIT" ]
null
null
null
1,409.968215
341,048
0.959554
[ [ [ "import numpy as np\nimport pickle\nimport gzip\nimport glob\nimport json\nimport csv\nimport sys\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport os\nimport imageio\nimport cv2\n \nsns.set()\n\n%matplotlib inline", "_____no_output_____" ] ], [ [ "Specify the experiment directory: pass this in as a command line argument.", "_____no_output_____" ] ], [ [ "# Specify the experiment directory\nexperiment_dir = '/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition'", "_____no_output_____" ] ], [ [ "What needs to be saved?\n1. Plots of whatever the user passes in (\"observation_keys\")\n - TODO: Split by whatever the experiment is being tuned over (like in viskit)\n2. (# of goals, resets/reset-free, domain/task, VICE/gtr, etc.)\n3. Gifs of the run\n4. Important parameters", "_____no_output_____" ] ], [ [ "def log_experiment(experiment_dir, observation_keys):\n # Search for the seed directories\n for seed in glob.iglob(os.path.join(experiment_dir, '*')):\n if not os.path.isdir(seed):\n continue\n ", "_____no_output_____" ], [ "test = '/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/progress.csv'\nwith open(test, newline='') as f:\n df = pd.read_csv(f)", "_____no_output_____" ], [ "df.columns", "_____no_output_____" ], [ "observation_keys = [\n 'object_to_target_circle_distance-last-mean',\n 'object_to_target_position_distance-last-mean',\n]", "_____no_output_____" ], [ "# evaluation_obs_path = 'evaluation/env_infos/obs/'\n# training_obs_path = 'training/env_infos/obs/'\n\ndef contains_str_from_list(str_to_check, str_list):\n return any(s in str_to_check for s in str_list)\n\nall_obs_keys_to_record = [\n col_name for col_name in df.columns\n if contains_str_from_list(col_name, observation_keys)]\n\n# all_obs_keys_to_record 
= np.concatenate([\n# [path + observation_key for observation_key in observation_keys]\n# for path in (evaluation_obs_path, training_obs_path)\n# ])\nall_obs_keys_to_record", "_____no_output_____" ], [ "record_data = df[all_obs_keys_to_record]\nnum_keys = len(all_obs_keys_to_record)\n\nif num_keys % 2 != 0:\n num_keys += 1\nnum_rows = num_keys // 2\nnum_cols = 2\n\ncurr_row, curr_col = 0, 0\nfig, ax = plt.subplots(2, 2, figsize=(18, 9))\nfor i, col in enumerate(record_data):\n num_data_points = len(record_data[col])\n data = record_data[col]\n# ax[i].subplot(num_rows, num_cols, i + 1)\n row_index, col_index = i // num_rows, i % num_cols\n ax[row_index, col_index].set_title(col)\n ax[row_index, col_index].plot(data)\n \n# plt.show()", "_____no_output_____" ], [ "def generate_plots(seed_dir, save_dir, observation_keys, fig=None, axes=None):\n data_fn = os.path.join(seed_dir, 'progress.csv')\n with open(data_fn, newline='') as f:\n df = pd.read_csv(f)\n \n def contains_str_from_list(str_to_check, str_list):\n return any(s in str_to_check for s in str_list)\n\n all_obs_keys_to_record = [\n col_name for col_name in df.columns\n if contains_str_from_list(col_name, observation_keys)\n ]\n \n record_data = df[all_obs_keys_to_record]\n num_keys = len(all_obs_keys_to_record)\n\n # Set up the figure\n if num_keys % 2 != 0:\n num_keys += 1\n num_rows = num_keys // 2\n num_cols = 2\n \n if fig is None and axes is None:\n fig, axes = plt.subplots(num_cols, num_rows, figsize=(18, 9))\n\n for i, col in enumerate(record_data):\n num_data_points = len(record_data[col])\n data = record_data[col]\n row_index, col_index = i // num_rows, i % num_cols\n axes[row_index, col_index].set_title(col)\n axes[row_index, col_index].plot(data, alpha=0.9)\n \n return fig, axes", "_____no_output_____" ], [ "video_save_frequency = 100\nvideo_path = 
'/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos'\nfor video_path in glob.iglob(os.path.join(video_path, '*00_0.mp4')):\n print(video_path)", "/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/training_path_300_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/training_path_500_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/training_path_100_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/evaluation_path_300_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/training_path_200_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/evaluation_path_200_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/training_path_400_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/evaluation_path_400_0.mp4
\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/evaluation_path_500_0.mp4\n/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/id=9867fc30-seed=2007_2019-08-16_02-38-25c0jt87k7/videos/evaluation_path_100_0.mp4\n" ], [ "test_video = '/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T15-46-37-two_policies_debug/id=b529c39e-seed=2542_2019-08-16_15-46-38m9pcum43/videos/training_path_0_0.mp4'\n\ndef extract_video_frames(video_path, img_size):\n vidcap = cv2.VideoCapture(video_path)\n success, image = vidcap.read()\n images = []\n while success:\n image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n image = cv2.resize(image, img_size)\n images.append(image)\n success, image = vidcap.read()\n return images\n\ndef convert_images_to_gif(images, save_path):\n imageio.mimsave(save_path, images)\n\ndef video_to_gif(video_path, output_path, img_size=(100, 100)):\n images = extract_video_frames(test_video, img_size)\n convert_images_to_gif(images, output_path)", "_____no_output_____" ], [ "def save_gifs(seed_dir, save_dir, save_frequency=100):\n video_path = os.path.join(seed_dir, 'videos')\n # TODO: Find the videos to save w.r.t save_frequency.\n for path in glob.iglob(os.path.join(video_path, '*00_0.mp4')):\n seed_name = seed_dir.split('seed=')[-1].split('_')[0]\n output_fn = 'seed=' + seed_name + '_' + path.split('/')[-1].replace('mp4', 'gif')\n output_path = os.path.join(save_dir, output_fn)\n video_to_gif(path, output_path)", "_____no_output_____" ], [ "def log_experiment(experiment_dir, observation_keys):\n if not os.path.exists(os.path.join(experiment_dir, 'log')):\n os.mkdir(os.path.join(experiment_dir, 'log'))\n \n save_dir = os.path.join(experiment_dir, 'log')\n # Search for the seed directories\n fig, axes = None, 
None\n for seed_dir in glob.iglob(os.path.join(experiment_dir, '*')):\n if not os.path.isdir(seed_dir) or seed_dir == save_dir:\n continue\n fig, axes = generate_plots(seed_dir, save_dir, observation_keys, fig=fig, axes=axes)\n save_gifs(seed_dir, save_dir)\n\n output_fn = os.path.join(save_dir, 'plots.png')\n plt.savefig(output_fn)\n plt.show()\n ", "_____no_output_____" ], [ "log_experiment('/home/justinvyu/ray_results/gym/DClaw/TurnFreeValve3ResetFreeSwapGoal-v0/2019-08-16T02-38-24-state_estimation_scaled_goal_condition/',\n observation_keys)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa26549c43ec68aa6aeb01b022fd47f8b97e131
117,159
ipynb
Jupyter Notebook
MongoDB/notebooks/project_reshaping_all_3.ipynb
kasamoh/NoSQL
24ea4e835e182282acad7d42c72111f8bd45beb2
[ "MIT" ]
null
null
null
MongoDB/notebooks/project_reshaping_all_3.ipynb
kasamoh/NoSQL
24ea4e835e182282acad7d42c72111f8bd45beb2
[ "MIT" ]
null
null
null
MongoDB/notebooks/project_reshaping_all_3.ipynb
kasamoh/NoSQL
24ea4e835e182282acad7d42c72111f8bd45beb2
[ "MIT" ]
null
null
null
43.472727
270
0.457908
[ [ [ "import pymongo\nfrom pymongo import MongoClient\nimport pprint\nfrom IPython.display import clear_output\n\n# Replace XXXX with your connection URI from the Atlas UI\nclient=MongoClient(\"mongodb://analytics:[email protected]:27017,mflix-shard-00-01-zmtem.mongodb.net:27017,mflix-shard-00-02-zmtem.mongodb.net:27017/test?ssl=true&replicaSet=mflix-shard-0&authSource=admin&retryWrites=true\")\n\n# Like the last handout, this pipeline will not work on Atlas until MongoDB 3.6 has been released\n# If you're testing this before 3.6 is released you can download and install MongoDB 3.5.X locally\n# In that case you should use \"mongodb://localhost:27017\" as your connection URI\npipeline = [\n {\n '$limit': 100\n },\n {\n '$addFields': {\n 'lastupdated': {\n '$arrayElemAt': [\n {'$split': [\"$lastupdated\", \".\"]},\n 0\n ]}\n }\n },\n {\n '$project': {\n 'title': 1,\n 'year': 1,\n 'directors': {'$split': [\"$director\", \", \"]},\n 'actors': {'$split': [\"$cast\", \", \"]},\n 'writers': {'$split': [\"$writer\", \", \"]},\n 'genres': {'$split': [\"$genre\", \", \"]},\n 'languages': {'$split': [\"$language\", \", \"]},\n 'countries': {'$split': [\"$country\", \", \"]},\n 'plot': 1,\n 'fullPlot': \"$fullplot\",\n 'rated': \"$rating\"\n }\n }\n]\n\nclear_output()\npprint.pprint(list(client.mflix.movies_initial.aggregate(pipeline)))", "[{'_id': ObjectId('5c2d427893c58e295ca00af8'),\n 'actors': [''],\n 'countries': [' Carmencita does a dance with kicks and twirls'],\n 'directors': [''],\n 'fullPlot': '\"Performing on what looks like a small wooden stage',\n 'genres': ['\"Documentary'],\n 'languages': [' wearing a dress with a hoop skirt and white high-heeled '\n 'pumps'],\n 'plot': 'http://ia.media-imdb.com/images/M/MV5BMjAzNDEwMzk3OV5BMl5BanBnXkFtZTcwOTk4OTM5Ng@@._V1_SX300.jpg',\n 'rated': 'NOT RATED',\n 'title': 'Carmencita',\n 'writers': ['William K.L. 
Dickson'],\n 'year': '1894'},\n {'_id': ObjectId('5c2d427893c58e295ca00af9'),\n 'actors': ['\"Charles Kayser'],\n 'countries': [' places it on the anvil'],\n 'directors': ['William K.L. Dickson'],\n 'fullPlot': 'Three men hammer on an anvil and pass a bottle of beer around.',\n 'genres': ['Short'],\n 'languages': ['\"A stationary camera looks at a large anvil with a blacksmith '\n 'behind it and one on either side. The smith in the middle '\n 'draws a heated metal rod from the fire'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'Blacksmith Scene',\n 'writers': [''],\n 'year': '1893'},\n {'_id': ObjectId('5c2d427893c58e295ca00afa'),\n 'actors': ['�mile Reynaud'],\n 'countries': [' Arlequin come to see his lover Colombine. But then Pierrot '\n 'knocks at the door and Colombine and Arlequin hide. Pierrot '\n 'starts singing but Arlequin scares him and the poor man goes '\n 'away.\"'],\n 'directors': [' Short\"'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['\"One night'],\n 'plot': '566',\n 'rated': '',\n 'title': 'Pauvre Pierrot',\n 'writers': ['1892-10-28'],\n 'year': '1892'},\n {'_id': ObjectId('5c2d427893c58e295ca00afb'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1894-01-09'],\n 'fullPlot': \"A man (Thomas Edison's assistant) takes a pinch of snuff and \"\n 'sneezes. This is one of the earliest Thomas Edison films and '\n 'was the first motion picture to be copyrighted in the United '\n 'States.',\n 'genres': ['\"Documentary'],\n 'languages': [\"A man (Edison's assistant) takes a pinch of snuff and \"\n 'sneezes. This is one of the earliest Edison films and was the '\n 'first motion picture to be copyrighted in the United States.'],\n 'plot': '',\n 'rated': '',\n 'title': 'Edison Kinetoscopic Record of a Sneeze',\n 'writers': ['William K.L. Dickson'],\n 'year': '1894'},\n {'_id': ObjectId('5c2d427893c58e295ca00afc'),\n 'actors': [''],\n 'countries': [' turning either left or right. 
Most of them are women in '\n '...\"'],\n 'directors': ['1895-03-22'],\n 'fullPlot': '\"A man opens the big gates to the Lumi�re factory. Through the '\n 'gateway and a smaller doorway beside it',\n 'genres': ['\"Documentary'],\n 'languages': [' workers are streaming out'],\n 'plot': '',\n 'rated': '',\n 'title': 'Employees Leaving the Lumi�re Factory',\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00afd'),\n 'actors': [' Louis Lumi�re\"'],\n 'countries': [' waiting for a train'],\n 'directors': ['1896-01-01'],\n 'fullPlot': 'http://ia.media-imdb.com/images/M/MV5BMjEyNDk5MDYzOV5BMl5BanBnXkFtZTgwNjIxMTEwMzE@._V1_SX300.jpg',\n 'genres': ['\"Documentary'],\n 'languages': ['\"A group of people are standing in a straight line along the '\n 'platform of a railway station'],\n 'plot': '5043',\n 'rated': '',\n 'title': 'The Arrival of a Train',\n 'writers': ['\"Auguste Lumi�re'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00afe'),\n 'actors': [''],\n 'countries': [' when a mischievous boy sneaks up behind his back'],\n 'directors': [''],\n 'fullPlot': '',\n 'genres': ['\"Comedy'],\n 'languages': ['\"A gardener is watering his flowers'],\n 'plot': '2554',\n 'rated': '',\n 'title': 'Tables Turned on the Gardener',\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00aff'),\n 'actors': [''],\n 'countries': ['\"A baby is seated at a table between its cheerful parents'],\n 'directors': ['1895-12-28'],\n 'fullPlot': '1669',\n 'genres': ['\"Documentary'],\n 'languages': [''],\n 'plot': '5.9',\n 'rated': '',\n 'title': \"Baby's Dinner\",\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00b00'),\n 'actors': [''],\n 'countries': ['\"The sea is before us. 
Some rocks are visible to the right '\n 'and a narrow jetty extends about ten meters or so about three '\n 'feet above the sea'],\n 'directors': ['1896-06-28'],\n 'fullPlot': '\"Several little boys run along a pier',\n 'genres': ['\"Documentary'],\n 'languages': [' then jump into the ocean.\"'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Sea',\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00b01'),\n 'actors': [''],\n 'countries': ['\"Auguste Lumi�re directs four workers in the demolition of an '\n 'old wall at the Lumi�re factory. One worker is pressing the '\n 'wall inwards with a jackscrew'],\n 'directors': ['2005-04-15'],\n 'fullPlot': '\"Auguste Lumi�re directs four workers in the demolition of an '\n 'old wall at the Lumi�re factory. One worker is pressing the '\n 'wall inwards with a jackscrew',\n 'genres': ['\"Documentary'],\n 'languages': [' while another is pushing it with a ...\"'],\n 'plot': '',\n 'rated': '',\n 'title': \"D�molition d'un mur\",\n 'writers': ['Louis Lumi�re'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b02'),\n 'actors': ['Georges M�li�s'],\n 'countries': [' Mephistopheles conjures up a young girl and various '\n 'supernatural creatures'],\n 'directors': [''],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['\"A bat flies into an ancient castle and transforms itself '\n 'into Mephistopheles himself. Producing a cauldron'],\n 'plot': '1135',\n 'rated': '',\n 'title': 'The House of the Devil',\n 'writers': ['Georges M�li�s'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b03'),\n 'actors': ['\"Jeanne d\\'Alcy'],\n 'countries': [' a chair and small table. 
He brings a well-dressed women '\n 'through the door'],\n 'directors': ['Georges M�li�s'],\n 'fullPlot': 'A woman disappears on stage.',\n 'genres': ['Short'],\n 'languages': ['\"An elegantly dressed man enters through a stage door onto a '\n 'set with decorated back screen'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Conjuring of a Woman at the House of Robert Houdin',\n 'writers': [''],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b04'),\n 'actors': [''],\n 'countries': [' and a battle ensues in hilarious comic fashion.\"'],\n 'directors': [''],\n 'fullPlot': '\"A man tries to get a good night\\'s sleep',\n 'genres': ['\"Short'],\n 'languages': [' but is disturbed by a giant spider that leaps onto his bed'],\n 'plot': '',\n 'rated': '',\n 'title': 'A Terrible Night',\n 'writers': ['Georges M�li�s'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b05'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['Three friends are playing cards in a beer garden. One of them '\n 'orders drinks. The waitress comes back with a bottle of wine '\n 'and three glasses on a tray. The man serves his friends. They '\n '...'],\n 'directors': [''],\n 'fullPlot': '462',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': '5.1',\n 'rated': '',\n 'title': 'Une partie de cartes',\n 'writers': ['Georges M�li�s'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b06'),\n 'actors': ['Georges M�li�s'],\n 'countries': [' candles ...\"'],\n 'directors': [''],\n 'fullPlot': '\"A weary traveler stops at an inn along the way to get a good '\n \"night's sleep\",\n 'genres': ['\"Short'],\n 'languages': [' but his rest is interrupted by odd happenings when he gets '\n 'to his room--beds vanishing and re-appearing'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Bewitched Inn',\n 'writers': ['Georges M�li�s'],\n 'year': '1897'},\n {'_id': ObjectId('5c2d427893c58e295ca00b07'),\n 'actors': ['George L. 
Du Maurier (novel)'],\n 'countries': [''],\n 'directors': [''],\n 'fullPlot': '\"Dancer Ella Lola dances a routine based on the famous '\n 'character of \"\"Trilby\"\".\"',\n 'genres': [''],\n 'languages': ['\"Dancer Ella Lola dances a routine based on the famous '\n 'character of \"\"Trilby\"\".\"'],\n 'plot': '',\n 'rated': '1898',\n 'title': '\"Ella Lola',\n 'writers': ['James H. White'],\n 'year': ' a la Trilby\"'},\n {'_id': ObjectId('5c2d427893c58e295ca00b08'),\n 'actors': ['Georges M�li�s (creator)'],\n 'countries': [' Satan appears and surprises the astronomer. At the command '\n 'of the Fairy ...\"'],\n 'directors': [''],\n 'fullPlot': '\"\"\"In the opening of this film is seen the astronomer intently '\n 'poring over his books. Suddenly',\n 'genres': ['\"Short'],\n 'languages': [' in a cloud of smoke'],\n 'plot': '',\n 'rated': '',\n 'title': 'A Trip to the Moon',\n 'writers': ['Georges M�li�s'],\n 'year': '1898'},\n {'_id': ObjectId('5c2d427893c58e295ca00b09'),\n 'actors': ['Charles Perrault (story)'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTgwMDY1MzM1NV5BMl5BanBnXkFtZTgwMjM1MzUwMzE@._V1_SX300.jpg'],\n 'directors': ['1899-12-25'],\n 'fullPlot': '6.6',\n 'genres': ['\"Drama'],\n 'languages': ['586'],\n 'plot': '',\n 'rated': '',\n 'title': 'Cinderella',\n 'writers': ['Georges M�li�s'],\n 'year': '1899'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0a'),\n 'actors': ['Georges M�li�s'],\n 'countries': [''],\n 'directors': ['1900-06-30'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': 'http://ia.media-imdb.com/images/M/MV5BMjM1NTQyNDQxOV5BMl5BanBnXkFtZTgwMTQ1MzUwMzE@._V1_SX300.jpg',\n 'rated': '',\n 'title': 'The Sign of the Cross',\n 'writers': ['Georges M�li�s'],\n 'year': '1899'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0b'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['France'],\n 'directors': ['Georges M�li�s'],\n 'fullPlot': '',\n 'genres': ['Short'],\n 'languages': [''],\n 'plot': '',\n 'rated': '',\n 
'title': 'A Turn of the Century Illusionist',\n 'writers': [''],\n 'year': '1899'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0c'),\n 'actors': ['Harold Smith'],\n 'countries': [' at a watch'],\n 'directors': ['George Albert Smith'],\n 'fullPlot': ' seen magnified.\"',\n 'genres': ['Short'],\n 'languages': ['\"A child borrows his grandmother\\'s magnifying glass to look '\n 'at a newspaper ad for Bovril'],\n 'plot': '\"A boy looks through glasses at various objects',\n 'rated': '',\n 'title': \"Grandma's Reading Glass\",\n 'writers': [''],\n 'year': '1900'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0d'),\n 'actors': [''],\n 'countries': [' then he wakes up in bed next to his wife.\"'],\n 'directors': ['1900-08-01'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['\"A man dreams he is flirting with an attractive young lady'],\n 'plot': '378',\n 'rated': '',\n 'title': 'Let Me Dream Again',\n 'writers': ['George Albert Smith'],\n 'year': '1900'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0e'),\n 'actors': [''],\n 'countries': [' he begins to blow with all his might. Immediately the ...\"'],\n 'directors': [''],\n 'fullPlot': '\"A chemist in his laboratory places upon a table his own head',\n 'genres': ['\"Short'],\n 'languages': [' alive; then fixing upon his head a rubber tube with a pair '\n 'of bellows'],\n 'plot': '',\n 'rated': '',\n 'title': 'The India Rubber Head',\n 'writers': ['Georges M�li�s'],\n 'year': '1901'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0f'),\n 'actors': [''],\n 'countries': [' as they appear at night.\"'],\n 'directors': ['1901-11-01'],\n 'fullPlot': '\"A most perfect picture of the Pan-American Exposition '\n 'buildings',\n 'genres': ['\"Documentary'],\n 'languages': [' including the Electric Tower and Temple of Music'],\n 'plot': '',\n 'rated': '',\n 'title': 'Panorama of Esplanade by Night',\n 'writers': ['Edwin S. Porter'],\n 'year': '1901'},\n {'_id': ObjectId('5c2d427893c58e295ca00b10'),\n 'actors': [' Edwin S. 
Porter\"'],\n 'countries': ['\"Porter\\'s sequential continuity editing links several shots '\n 'to form a narrative of the famous fairy tale story of Jack '\n 'and his magic beanstalk. Borrowing on cinematographic methods '\n \"reminiscent of 'Georges Melies'\"],\n 'directors': ['1902-07-15'],\n 'fullPlot': 'http://ia.media-imdb.com/images/M/MV5BMjAzNTI3MzI0Nl5BMl5BanBnXkFtZTcwMzQ1MTYzMw@@._V1_SX300.jpg',\n 'genres': ['\"Short'],\n 'languages': [\"Porter's sequential continuity editing links several shots to \"\n 'form a narrative of the famous fairy tale story of Jack and '\n 'his magic beanstalk. Borrowing on cinematographic methods '\n '...'],\n 'plot': '442',\n 'rated': '',\n 'title': 'Jack and the Beanstalk',\n 'writers': ['\"George S. Fleming'],\n 'year': '1902'},\n {'_id': ObjectId('5c2d427893c58e295ca00b11'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['A group of astronomers go on an expedition to the moon.'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '23904',\n 'genres': ['\"Short'],\n 'languages': ['http://ia.media-imdb.com/images/M/MV5BMTQzMDYxNzUxNl5BMl5BanBnXkFtZTgwMjgxNjkxMTE@._V1_SX300.jpg'],\n 'plot': '8.2',\n 'rated': 'TV-G',\n 'title': 'A Trip to the Moon',\n 'writers': ['1902-10-04'],\n 'year': '1902'},\n {'_id': ObjectId('5c2d427893c58e295ca00b12'),\n 'actors': [' Percy Stow\"'],\n 'countries': ['6.3'],\n 'directors': ['1903-10-17'],\n 'fullPlot': ' Geoffrey Faithfull\"',\n 'genres': ['\"Fantasy'],\n 'languages': [''],\n 'plot': ' Blair',\n 'rated': '',\n 'title': 'Alice in Wonderland',\n 'writers': ['\"Cecil M. Hepworth'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b13'),\n 'actors': [' Edwin S. 
Porter\"'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjAzNTI3MzI0Nl5BMl5BanBnXkFtZTcwMzQ1MTYzMw@@._V1_SX300.jpg'],\n 'directors': ['1903-01-01'],\n 'fullPlot': '6.4',\n 'genres': ['\"Short'],\n 'languages': ['1158'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'Life of an American Fireman',\n 'writers': ['\"George S. Fleming'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b14'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTQ4NDE5MDcyNF5BMl5BanBnXkFtZTgwNDU3Njk5MTE@._V1_SX300.jpg'],\n 'directors': ['1903-12-01'],\n 'fullPlot': '7.4',\n 'genres': ['\"Short'],\n 'languages': ['9847'],\n 'plot': '',\n 'rated': 'TV-G',\n 'title': 'The Great Train Robbery',\n 'writers': ['Edwin S. Porter'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b15'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['\"A marching band appears'],\n 'directors': [' Music\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['The leader of a marching band demonstrates an unusual way of '\n 'writing music.'],\n 'plot': '1121',\n 'rated': '',\n 'title': 'The Music Lover',\n 'writers': ['1903-08-15'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b16'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjM3MTIwOTU0NV5BMl5BanBnXkFtZTgwNDM1MzUwMzE@._V1_SX300.jpg'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '7.3',\n 'genres': ['\"Short'],\n 'languages': ['574'],\n 'plot': '',\n 'rated': '',\n 'title': 'Fairyland: A Kingdom of Fairies',\n 'writers': ['1903-09-05'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b17'),\n 'actors': ['George Albert Smith'],\n 'countries': ['\"A simple scene of two rather flamboyantly-dressed Edwardian '\n 'children attempting to feed a spoonful of medicine to a sick '\n 'kitten. 
The film is important for being one of the earliest '\n 'films to cut to a close-up'],\n 'directors': [' Family\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['A girl gives a spoonful of medicine to a kitten.'],\n 'plot': '468',\n 'rated': '',\n 'title': 'The Sick Kitten',\n 'writers': [''],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b18'),\n 'actors': [''],\n 'countries': ['France'],\n 'directors': ['Alice Guy'],\n 'fullPlot': '',\n 'genres': ['Short'],\n 'languages': [''],\n 'plot': '',\n 'rated': '',\n 'title': 'Faust et M�phistoph�l�s',\n 'writers': [''],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b19'),\n 'actors': ['\"Lewin Fitzhamon'],\n 'countries': ['6.7'],\n 'directors': [' Family\"'],\n 'fullPlot': ' Cecil M. Hepworth\"',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': ' Barbara Hepworth',\n 'rated': '',\n 'title': 'Rescued by Rover',\n 'writers': ['1905-08-19'],\n 'year': '1905'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1a'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTYxNjExMzk5Nl5BMl5BanBnXkFtZTgwMzE5MjAwMzE@._V1_SX300.jpg'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '7.7',\n 'genres': ['\"Short'],\n 'languages': ['2022'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Voyage Across the Impossible',\n 'writers': ['1904-10-01'],\n 'year': '1904'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1b'),\n 'actors': ['Harold M. 
Shaw'],\n 'countries': ['448'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['7.1'],\n 'plot': ' Ethel Jewett\"',\n 'rated': 'UNRATED',\n 'title': 'The Land Beyond the Sunset',\n 'writers': ['1912-10-28'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1c'),\n 'actors': [''],\n 'countries': ['\"According to the rapid strides that electricity is making in '\n 'this wonderful age we are not surprised to see in this '\n 'picture an ideal hotel of the future in which everything is '\n 'done by electricity. We see a couple entering the hostelry '\n 'and'],\n 'directors': ['1908-12-19'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['According to the rapid strides that electricity is making in '\n 'this wonderful age we are not surprised to see in this '\n 'picture an ideal hotel of the future in which everything is '\n 'done by ...'],\n 'plot': '481',\n 'rated': '',\n 'title': 'The Electric Hotel',\n 'writers': ['Segundo de Chom�n'],\n 'year': '1908'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1d'),\n 'actors': [''],\n 'countries': [' following the path of a subway train as it makes its way '\n 'through New York City subway tunnels on its journey to the '\n 'old ...\"'],\n 'directors': ['1905-06-05'],\n 'fullPlot': '\"Starting at Union Square',\n 'genres': ['\"Short'],\n 'languages': [' we are taken for an underground excursion'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'New York Subway',\n 'writers': ['G.W. Bitzer'],\n 'year': '1905'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1e'),\n 'actors': ['Walter R. Booth'],\n 'countries': ['\"A magical glowing white motorcar dismembers policemen'],\n 'directors': [' Comedy\"'],\n 'fullPlot': '',\n 'genres': ['\"Fantasy'],\n 'languages': ['A British trick film in which a motorist ends up driving '\n 'around the rings of Saturn.'],\n 'plot': '467',\n 'rated': '',\n 'title': \"The '?' 
Motorist\",\n 'writers': ['1906-10-01'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1f'),\n 'actors': [' Edwin S. Porter\"'],\n 'countries': ['\"Adapted from Winsor McCay\\'s films and comics of the period'],\n 'directors': ['1906-02-01'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['The fiend faces the spectacular mind-bending consequences of '\n 'his free-wheeling rarebit binge.'],\n 'plot': '1082',\n 'rated': '',\n 'title': 'Dream of a Rarebit Fiend',\n 'writers': ['\"Wallace McCutcheon'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b20'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1972-07-01'],\n 'fullPlot': 'Two travellers are tormented by Satan from inn to inn and '\n 'eventuly experience a buggy ride through the heavens courtesy '\n 'of the Devil before he takes one of them down to hell and '\n 'roasts him ...',\n 'genres': ['\"Short'],\n 'languages': ['Two travellers are tormented by Satan from inn to inn and '\n 'eventuly experience a buggy ride through the heavens courtesy '\n 'of the Devil before he takes one of them down to hell and '\n 'roasts him on a spit.'],\n 'plot': 'http://ia.media-imdb.com/images/M/MV5BOTc4NDU2NTA0N15BMl5BanBnXkFtZTgwNDQ1MzUwMzE@._V1_SX300.jpg',\n 'rated': '',\n 'title': 'The 400 Tricks of the Devil',\n 'writers': ['Georges M�li�s'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b21'),\n 'actors': ['J. 
Stuart Blackton'],\n 'countries': ['\"Considered the first truly animated movie (or at least the '\n 'first verifiable'],\n 'directors': [' Comedy\"'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['A cartoonist draws faces and figures on a blackboard - and '\n 'they come to life.'],\n 'plot': '625',\n 'rated': '',\n 'title': 'Humorous Phases of Funny Faces',\n 'writers': ['1906-04-06'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b22'),\n 'actors': ['Charles Tait'],\n 'countries': ['285'],\n 'directors': [' Drama\"'],\n 'fullPlot': '',\n 'genres': ['\"Biography'],\n 'languages': ['6.3'],\n 'plot': ' Bella Cola\"',\n 'rated': '',\n 'title': 'The Story of the Kelly Gang',\n 'writers': ['1906-12-26'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b23'),\n 'actors': [' Ferdinand Zecca\"'],\n 'countries': [' but is confronted by a Good Spirit who opposes him.\"'],\n 'directors': ['1907-08-17'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['\"A demonic magician attempts to perform his act in a strange '\n 'grotto'],\n 'plot': '442',\n 'rated': '',\n 'title': 'The Red Spectre',\n 'writers': ['\"Segundo de Chom�n'],\n 'year': '1907'},\n {'_id': ObjectId('5c2d427893c58e295ca00b24'),\n 'actors': ['�mile Cohl'],\n 'countries': [' a series of scenes without much narrative structure'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['\"The first all-animated film in history'],\n 'plot': '1245',\n 'rated': '',\n 'title': 'A Fantasy',\n 'writers': ['1908-08-17'],\n 'year': '1908'},\n {'_id': ObjectId('5c2d427893c58e295ca00b25'),\n 'actors': [''],\n 'countries': ['\"http://ia.media-imdb.com/images/M/MV5BNjg3MTI3ODI2N15BMl5BanBnXkFtZTcwMTg1MDA4Mg@@._V1._CR64'],\n 'directors': ['1909-07-08'],\n 'fullPlot': '6.6',\n 'genres': ['\"Short'],\n 'languages': ['294'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'The Country Doctor',\n 'writers': ['D.W. 
Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b26'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1909-12-13'],\n 'fullPlot': '6.6',\n 'genres': ['\"Short'],\n 'languages': ['1375'],\n 'plot': '',\n 'rated': 'G',\n 'title': 'A Corner in Wheat',\n 'writers': ['D.W. Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b27'),\n 'actors': ['D.W. Griffith'],\n 'countries': [''],\n 'directors': [' Drama\"'],\n 'fullPlot': ' Mary Pickford',\n 'genres': ['\"Short'],\n 'languages': [' Gladys Egan\"'],\n 'plot': ' Marion Leonard',\n 'rated': '',\n 'title': 'The Lonely Villa',\n 'writers': ['1909-06-10'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b28'),\n 'actors': [''],\n 'countries': ['A king exacts vengeance upon his faithless mistress and her '\n 'lover.'],\n 'directors': ['1909-09-02'],\n 'fullPlot': '428',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': '6.1',\n 'rated': '',\n 'title': 'The Sealed Room',\n 'writers': ['D.W. Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b29'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1909-01-25'],\n 'fullPlot': '6.3',\n 'genres': ['\"Comedy'],\n 'languages': ['626'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'Those Awful Hats',\n 'writers': ['D.W. Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2a'),\n 'actors': ['Urban Gad'],\n 'countries': [''],\n 'directors': ['1912-04-18'],\n 'fullPlot': '6.6',\n 'genres': ['\"Short'],\n 'languages': ['429'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Woman Always Pays',\n 'writers': ['Urban Gad'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2b'),\n 'actors': ['J. 
Searle Dawley'],\n 'countries': ['2149'],\n 'directors': [' Horror\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.5'],\n 'plot': ' Augustus Phillips\"',\n 'rated': 'UNRATED',\n 'title': 'Frankenstein',\n 'writers': ['1910-03-18'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2c'),\n 'actors': ['D.W. Griffith'],\n 'countries': [''],\n 'directors': [' War\"'],\n 'fullPlot': '6.5',\n 'genres': ['\"Short'],\n 'languages': ['331'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'In the Border States',\n 'writers': ['1910-06-13'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2d'),\n 'actors': ['Charles Kingsley (poem)'],\n 'countries': [''],\n 'directors': ['1910-05-05'],\n 'fullPlot': '6.5',\n 'genres': ['\"Drama'],\n 'languages': ['343'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Unchanging Sea',\n 'writers': ['D.W. Griffith'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2e'),\n 'actors': ['Otis Turner'],\n 'countries': ['5.7'],\n 'directors': [' Short\"'],\n 'fullPlot': ' Robert Z. Leonard\"',\n 'genres': ['\"Adventure'],\n 'languages': [''],\n 'plot': ' Eugenie Besserer',\n 'rated': 'NOT RATED',\n 'title': 'The Wonderful Wizard of Oz',\n 'writers': ['1910-03-24'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2f'),\n 'actors': ['Wladyslaw Starewicz'],\n 'countries': [' and stars very realistic ...\"'],\n 'directors': [' Comedy\"'],\n 'fullPlot': 'http://ia.media-imdb.com/images/M/MV5BMTczODYxMzI5OV5BMl5BanBnXkFtZTgwNzE4NDU0MjE@._V1_SX300.jpg',\n 'genres': ['\"Animation'],\n 'languages': ['\"A jilted husband takes his revenge by filming his wife and '\n 'her lover and showing the result at the local cinema. 
This '\n \"was one of Starewicz' first animated films\"],\n 'plot': '1593',\n 'rated': '',\n 'title': 'The Revenge of a Kinematograph Cameraman',\n 'writers': ['1912-10-27'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b30'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1911-09-12'],\n 'fullPlot': '5.7',\n 'genres': ['\"Comedy'],\n 'languages': ['135'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'Her Crowning Glory',\n 'writers': ['Laurence Trimble'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b31'),\n 'actors': ['D.W. Griffith'],\n 'countries': ['662'],\n 'directors': [' Romance\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.6'],\n 'plot': ' Edward Dillon\"',\n 'rated': 'UNRATED',\n 'title': 'The Lonedale Operator',\n 'writers': ['1911-03-23'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b32'),\n 'actors': ['1911-04-08'],\n 'countries': ['1034'],\n 'directors': [' Short'],\n 'fullPlot': '',\n 'genres': ['7 min'],\n 'languages': ['7.3'],\n 'plot': 'Winsor McCay',\n 'rated': '1911',\n 'title': '\"Winsor McCay',\n 'writers': [' Comedy\"'],\n 'year': ' the Famous Cartoonist of the N.Y. Herald and His Moving Comics\"'},\n {'_id': ObjectId('5c2d427893c58e295ca00b33'),\n 'actors': ['D.W. Griffith'],\n 'countries': ['149'],\n 'directors': [' Drama\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.4'],\n 'plot': ' Donald Crisp\"',\n 'rated': '',\n 'title': \"The Miser's Heart\",\n 'writers': ['1911-11-20'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b34'),\n 'actors': ['George Hennessy'],\n 'countries': [''],\n 'directors': ['1912-05-27'],\n 'fullPlot': '5.6',\n 'genres': ['\"Short'],\n 'languages': ['75'],\n 'plot': '',\n 'rated': '',\n 'title': 'A Beast at Bay',\n 'writers': ['D.W. 
Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b35'),\n 'actors': ['Victorien Sardou (adapted from the play by)'],\n 'countries': [''],\n 'directors': ['1912-11-13'],\n 'fullPlot': '5.1',\n 'genres': ['\"Drama'],\n 'languages': ['291'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'Cleopatra',\n 'writers': ['Charles L. Gaskill'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b36'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['458'],\n 'directors': [' Sci-Fi\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.9'],\n 'plot': ' Fernande Albany\"',\n 'rated': '',\n 'title': 'The Conquest of the Pole',\n 'writers': [''],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b37'),\n 'actors': ['Lionel Barrymore'],\n 'countries': [''],\n 'directors': ['1912-12-16'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['210'],\n 'plot': '',\n 'rated': '',\n 'title': \"The Burglar's Dilemma\",\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b38'),\n 'actors': ['\"Francesco Bertolini'],\n 'countries': [''],\n 'directors': [' Fantasy\"'],\n 'fullPlot': ' Giuseppe de Liguoro',\n 'genres': ['\"Adventure'],\n 'languages': [' Pier Delle Vigne\"'],\n 'plot': ' Arturo Pirovano',\n 'rated': '',\n 'title': \"Dante's Inferno\",\n 'writers': ['1911-07-01'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b39'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1912-02-15'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['103'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Mender of Nets',\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3a'),\n 'actors': ['D.W. 
Griffith'],\n 'countries': ['1226'],\n 'directors': [' Drama\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.7'],\n 'plot': ' Walter Miller\"',\n 'rated': '',\n 'title': 'The Musketeers of Pig Alley',\n 'writers': ['1912-10-31'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3b'),\n 'actors': ['\"Anita Loos'],\n 'countries': ['468'],\n 'directors': ['1912-12-05'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.5'],\n 'plot': ' Lionel Barrymore\"',\n 'rated': '',\n 'title': 'The New York Hat',\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3c'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1912-10-24'],\n 'fullPlot': '6',\n 'genres': ['\"Short'],\n 'languages': ['306'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Painted Lady',\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3d'),\n 'actors': ['\"James Keane'],\n 'countries': ['173'],\n 'directors': ['\"Andr� Calmettes'],\n 'fullPlot': '',\n 'genres': ['Drama'],\n 'languages': ['5.6'],\n 'plot': ' James Keane\"',\n 'rated': '',\n 'title': 'The Life and Death of King Richard III',\n 'writers': [' James Keane\"'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3e'),\n 'actors': ['\"Victor Sj�str�m'],\n 'countries': [''],\n 'directors': ['Victor Sj�str�m'],\n 'fullPlot': '94',\n 'genres': ['Drama'],\n 'languages': [''],\n 'plot': '6',\n 'rated': '',\n 'title': 'Tr�dg�rdsm�staren',\n 'writers': ['Mauritz Stiller'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3f'),\n 'actors': ['D.W. 
Griffith'],\n 'countries': ['336'],\n 'directors': [' Romance\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.2'],\n 'plot': ' Lionel Barrymore\"',\n 'rated': '',\n 'title': \"Death's Marathon\",\n 'writers': ['1913-06-14'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b40'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.8'],\n 'directors': [''],\n 'fullPlot': ' Ren�e Carl\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'Fantomas',\n 'writers': ['Louis Feuillade'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b41'),\n 'actors': ['Jere F. Looney'],\n 'countries': [''],\n 'directors': ['1913-05-10'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['175'],\n 'plot': '',\n 'rated': '',\n 'title': 'The House of Darkness',\n 'writers': ['D.W. Griffith'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b42'),\n 'actors': [' Victor Sj�str�m\"'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTI5MjYzMTY3Ml5BMl5BanBnXkFtZTcwMzY1NDE2Mw@@._V1_SX300.jpg'],\n 'directors': ['Victor Sj�str�m'],\n 'fullPlot': '7',\n 'genres': ['Drama'],\n 'languages': ['493'],\n 'plot': '',\n 'rated': '',\n 'title': 'Ingeborg Holm',\n 'writers': ['\"Nils Krok (play)'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b43'),\n 'actors': ['\"Frank Beal (story)'],\n 'countries': ['37'],\n 'directors': ['1913-12-08'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['5.7'],\n 'plot': ' Ninita Bristow\"',\n 'rated': '',\n 'title': 'The Inside of the White Slave Traffic',\n 'writers': ['Frank Beal'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b44'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.6'],\n 'directors': ['1913-10-02'],\n 'fullPlot': ' Ren�e Carl\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'Juve Against Fantomas',\n 'writers': 
['Louis Feuillade'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b45'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.7'],\n 'directors': ['1914-03-01'],\n 'fullPlot': ' Luitz-Morat\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'The Dead Man Who Killed',\n 'writers': ['Louis Feuillade'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b46'),\n 'actors': ['Stellan Rye'],\n 'countries': [''],\n 'directors': [' Horror\"'],\n 'fullPlot': ' Grete Berger',\n 'genres': ['\"Drama'],\n 'languages': [' Lyda Salmonova\"'],\n 'plot': ' John Gottowt',\n 'rated': '',\n 'title': 'The Student of Prague',\n 'writers': ['1913-09-01'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b47'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTU2NDg2ODkxMV5BMl5BanBnXkFtZTcwOTQzNzAzMg@@._V1_SX300.jpg'],\n 'directors': ['1913-11-24'],\n 'fullPlot': '6',\n 'genres': ['\"Crime'],\n 'languages': ['371'],\n 'plot': '',\n 'rated': 'TV-PG',\n 'title': 'Traffic in Souls',\n 'writers': ['George Loane Tucker'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b48'),\n 'actors': [' Eleuterio Rodolfi\"'],\n 'countries': ['6.1'],\n 'directors': ['1913-08-13'],\n 'fullPlot': ' Antonio Grisanti\"',\n 'genres': ['\"Adventure'],\n 'languages': [''],\n 'plot': ' Ubaldo Stefani',\n 'rated': '',\n 'title': 'The Last Days of Pompeii',\n 'writers': ['\"Mario Caserini'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b49'),\n 'actors': ['\"Edgar Allan Poe (stories)'],\n 'countries': ['788'],\n 'directors': ['1914-08-24'],\n 'fullPlot': '',\n 'genres': ['\"Crime'],\n 'languages': ['7'],\n 'plot': ' George Siegmann\"',\n 'rated': 'NOT RATED',\n 'title': \"The Avenging Conscience: or 'Thou Shalt Not Kill'\",\n 'writers': ['D.W. Griffith'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4a'),\n 'actors': ['D.W. 
Griffith'],\n 'countries': ['6.4'],\n 'directors': [' Western\"'],\n 'fullPlot': ' Robert Harron\"',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': ' Alfred Paget',\n 'rated': '',\n 'title': 'The Battle at Elderbush Gulch',\n 'writers': ['1913-12-01'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4b'),\n 'actors': ['Giovanni Pastrone'],\n 'countries': [' Dante Testa\"'],\n 'directors': [' History\"'],\n 'fullPlot': ' Lidia Quaranta',\n 'genres': ['\"Adventure'],\n 'languages': [' Gina Marangoni'],\n 'plot': '\"Carolina Catena',\n 'rated': 'NOT RATED',\n 'title': 'Cabiria',\n 'writers': ['1914-06-01'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4c'),\n 'actors': ['Charles Perrault (story)'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTcxODcyNjAwN15BMl5BanBnXkFtZTcwMTc1MzI1Mw@@._V1_SX300.jpg'],\n 'directors': ['1914-12-28'],\n 'fullPlot': '6.9',\n 'genres': ['\"Fantasy'],\n 'languages': ['753'],\n 'plot': '',\n 'rated': '',\n 'title': 'Cinderella',\n 'writers': ['James Kirkwood'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4d'),\n 'actors': [' Louis Feuillade'],\n 'countries': ['770'],\n 'directors': ['Louis Feuillade'],\n 'fullPlot': '',\n 'genres': ['Drama'],\n 'languages': ['6.6'],\n 'plot': ' Ren�e Carl\"',\n 'rated': '',\n 'title': 'Fantomas Against Fantomas',\n 'writers': ['\"Marcel Allain (novel)'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4e'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.5'],\n 'directors': ['1914-07-10'],\n 'fullPlot': ' Mesnery\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'Le faux magistrat',\n 'writers': ['Louis Feuillade'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4f'),\n 'actors': ['Winsor McCay'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTQxNzI4ODQ3NF5BMl5BanBnXkFtZTgwNzY5NzMwMjE@._V1_SX300.jpg'],\n 'directors': [' Comedy\"'],\n 
'fullPlot': '7.3',\n 'genres': ['\"Animation'],\n 'languages': ['1837'],\n 'plot': '',\n 'rated': '',\n 'title': 'Gertie the Dinosaur',\n 'writers': ['1914-09-15'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b50'),\n 'actors': ['\"Benjamin Christensen'],\n 'countries': ['258'],\n 'directors': ['1914-04-01'],\n 'fullPlot': '',\n 'genres': ['\"Drama'],\n 'languages': ['6.7'],\n 'plot': ' Fritz Lamprecht\"',\n 'rated': '',\n 'title': 'The Mysterious X',\n 'writers': ['Benjamin Christensen'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b51'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BOTE1MjM3NjI1N15BMl5BanBnXkFtZTgwODc5NDcwNDE@._V1_SX300.jpg'],\n 'directors': ['1914-11-07'],\n 'fullPlot': '6',\n 'genres': ['\"Short'],\n 'languages': ['483'],\n 'plot': '',\n 'rated': '',\n 'title': 'His Musical Career',\n 'writers': ['Charles Chaplin'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b52'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjAxMjE2ODMwNF5BMl5BanBnXkFtZTgwMTAwMTAyMjE@._V1_SX300.jpg'],\n 'directors': ['1914-08-31'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['673'],\n 'plot': '',\n 'rated': '',\n 'title': 'His New Profession',\n 'writers': ['Charles Chaplin'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b53'),\n 'actors': ['\"Courtenay Foote'],\n 'countries': ['92'],\n 'directors': ['Lois Weber'],\n 'fullPlot': '257',\n 'genres': ['Drama'],\n 'languages': ['\"http://ia.media-imdb.com/images/M/MV5BMjA5NTYzNDQ4NV5BMl5BanBnXkFtZTgwMzA5NzgwMzE@._V1._CR1'],\n 'plot': '5.9',\n 'rated': 'PASSED',\n 'title': 'Hypocrites',\n 'writers': ['Lois Weber'],\n 'year': '1915'},\n {'_id': ObjectId('5c2d427893c58e295ca00b54'),\n 'actors': ['Edward S. 
Curtis (story)'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjE3ODk0NTAwNF5BMl5BanBnXkFtZTcwNjU2MzYyMQ@@._V1_SX300.jpg'],\n 'directors': ['1914-12-07'],\n 'fullPlot': '5.8',\n 'genres': ['\"Drama'],\n 'languages': ['223'],\n 'plot': '',\n 'rated': '',\n 'title': 'In the Land of the Head Hunters',\n 'writers': ['Edward S. Curtis'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b55'),\n 'actors': [' D.W. Griffith'],\n 'countries': ['7.7'],\n 'directors': ['D.W. Griffith'],\n 'fullPlot': ' Robert Harron\"',\n 'genres': ['Drama'],\n 'languages': [''],\n 'plot': ' Mae Marsh',\n 'rated': '',\n 'title': 'Judith of Bethulia',\n 'writers': ['\"Thomas Bailey Aldrich (poem)'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b56'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1914-06-11'],\n 'fullPlot': '5.9',\n 'genres': ['\"Short'],\n 'languages': ['720'],\n 'plot': '',\n 'rated': 'TV-G',\n 'title': 'The Knockout',\n 'writers': ['Mack Sennett'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b57'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTk1MjMyNzMxMF5BMl5BanBnXkFtZTYwMTQ3ODc5._V1_SX300.jpg'],\n 'directors': ['1914-07-09'],\n 'fullPlot': '5.7',\n 'genres': ['\"Short'],\n 'languages': ['739'],\n 'plot': '',\n 'rated': '',\n 'title': 'Laughing Gas',\n 'writers': ['Charles Chaplin'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b58'),\n 'actors': [' Mack Sennett\"'],\n 'countries': ['433'],\n 'directors': ['1914-04-18'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['5.7'],\n 'plot': ' Chester Conklin\"',\n 'rated': '',\n 'title': 'Mabel at the Wheel',\n 'writers': ['\"Mabel Normand'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b59'),\n 'actors': ['\"Charles Chaplin'],\n 'countries': ['665'],\n 'directors': ['1914-06-20'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['5.8'],\n 'plot': ' Eva Nelson\"',\n 'rated': '',\n 
'title': \"Mabel's Married Life\",\n 'writers': ['Mack Sennett'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b5a'),\n 'actors': [''],\n 'countries': ['333_SY132_CR5'],\n 'directors': ['1914-02-02'],\n 'fullPlot': '78',\n 'genres': ['\"Short'],\n 'languages': ['253'],\n 'plot': '\"http://ia.media-imdb.com/images/M/MV5BMjI0Nzg5MjYxMF5BMl5BanBnXkFtZTcwMzgxODQ4Mg@@._V1._CR102',\n 'rated': 'TV-G',\n 'title': 'Making a Living',\n 'writers': ['Henry Lehrman'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b5b'),\n 'actors': ['Alice Guy'],\n 'countries': ['66'],\n 'directors': [' Drama\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.9'],\n 'plot': ' Fraunie Fraunholz\"',\n 'rated': 'UNRATED',\n 'title': 'The Ocean Waif',\n 'writers': ['1916-11-02'],\n 'year': '1916'}]\n" ], [ "clear_output()\npprint.pprint(list(client.mflix.movies_initial.aggregate(pipeline)))", "[{'_id': ObjectId('5c2d427893c58e295ca00af8'),\n 'actors': [''],\n 'countries': [' Carmencita does a dance with kicks and twirls'],\n 'directors': [''],\n 'fullPlot': '\"Performing on what looks like a small wooden stage',\n 'genres': ['\"Documentary'],\n 'languages': [' wearing a dress with a hoop skirt and white high-heeled '\n 'pumps'],\n 'plot': 'http://ia.media-imdb.com/images/M/MV5BMjAzNDEwMzk3OV5BMl5BanBnXkFtZTcwOTk4OTM5Ng@@._V1_SX300.jpg',\n 'rated': 'NOT RATED',\n 'title': 'Carmencita',\n 'writers': ['William K.L. Dickson'],\n 'year': '1894'},\n {'_id': ObjectId('5c2d427893c58e295ca00af9'),\n 'actors': ['\"Charles Kayser'],\n 'countries': [' places it on the anvil'],\n 'directors': ['William K.L. Dickson'],\n 'fullPlot': 'Three men hammer on an anvil and pass a bottle of beer around.',\n 'genres': ['Short'],\n 'languages': ['\"A stationary camera looks at a large anvil with a blacksmith '\n 'behind it and one on either side. 
The smith in the middle '\n 'draws a heated metal rod from the fire'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'Blacksmith Scene',\n 'writers': [''],\n 'year': '1893'},\n {'_id': ObjectId('5c2d427893c58e295ca00afa'),\n 'actors': ['�mile Reynaud'],\n 'countries': [' Arlequin come to see his lover Colombine. But then Pierrot '\n 'knocks at the door and Colombine and Arlequin hide. Pierrot '\n 'starts singing but Arlequin scares him and the poor man goes '\n 'away.\"'],\n 'directors': [' Short\"'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['\"One night'],\n 'plot': '566',\n 'rated': '',\n 'title': 'Pauvre Pierrot',\n 'writers': ['1892-10-28'],\n 'year': '1892'},\n {'_id': ObjectId('5c2d427893c58e295ca00afb'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1894-01-09'],\n 'fullPlot': \"A man (Thomas Edison's assistant) takes a pinch of snuff and \"\n 'sneezes. This is one of the earliest Thomas Edison films and '\n 'was the first motion picture to be copyrighted in the United '\n 'States.',\n 'genres': ['\"Documentary'],\n 'languages': [\"A man (Edison's assistant) takes a pinch of snuff and \"\n 'sneezes. This is one of the earliest Edison films and was the '\n 'first motion picture to be copyrighted in the United States.'],\n 'plot': '',\n 'rated': '',\n 'title': 'Edison Kinetoscopic Record of a Sneeze',\n 'writers': ['William K.L. Dickson'],\n 'year': '1894'},\n {'_id': ObjectId('5c2d427893c58e295ca00afc'),\n 'actors': [''],\n 'countries': [' turning either left or right. Most of them are women in '\n '...\"'],\n 'directors': ['1895-03-22'],\n 'fullPlot': '\"A man opens the big gates to the Lumi�re factory. 
Through the '\n 'gateway and a smaller doorway beside it',\n 'genres': ['\"Documentary'],\n 'languages': [' workers are streaming out'],\n 'plot': '',\n 'rated': '',\n 'title': 'Employees Leaving the Lumi�re Factory',\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00afd'),\n 'actors': [' Louis Lumi�re\"'],\n 'countries': [' waiting for a train'],\n 'directors': ['1896-01-01'],\n 'fullPlot': 'http://ia.media-imdb.com/images/M/MV5BMjEyNDk5MDYzOV5BMl5BanBnXkFtZTgwNjIxMTEwMzE@._V1_SX300.jpg',\n 'genres': ['\"Documentary'],\n 'languages': ['\"A group of people are standing in a straight line along the '\n 'platform of a railway station'],\n 'plot': '5043',\n 'rated': '',\n 'title': 'The Arrival of a Train',\n 'writers': ['\"Auguste Lumi�re'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00afe'),\n 'actors': [''],\n 'countries': [' when a mischievous boy sneaks up behind his back'],\n 'directors': [''],\n 'fullPlot': '',\n 'genres': ['\"Comedy'],\n 'languages': ['\"A gardener is watering his flowers'],\n 'plot': '2554',\n 'rated': '',\n 'title': 'Tables Turned on the Gardener',\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00aff'),\n 'actors': [''],\n 'countries': ['\"A baby is seated at a table between its cheerful parents'],\n 'directors': ['1895-12-28'],\n 'fullPlot': '1669',\n 'genres': ['\"Documentary'],\n 'languages': [''],\n 'plot': '5.9',\n 'rated': '',\n 'title': \"Baby's Dinner\",\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00b00'),\n 'actors': [''],\n 'countries': ['\"The sea is before us. 
Some rocks are visible to the right '\n 'and a narrow jetty extends about ten meters or so about three '\n 'feet above the sea'],\n 'directors': ['1896-06-28'],\n 'fullPlot': '\"Several little boys run along a pier',\n 'genres': ['\"Documentary'],\n 'languages': [' then jump into the ocean.\"'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Sea',\n 'writers': ['Louis Lumi�re'],\n 'year': '1895'},\n {'_id': ObjectId('5c2d427893c58e295ca00b01'),\n 'actors': [''],\n 'countries': ['\"Auguste Lumi�re directs four workers in the demolition of an '\n 'old wall at the Lumi�re factory. One worker is pressing the '\n 'wall inwards with a jackscrew'],\n 'directors': ['2005-04-15'],\n 'fullPlot': '\"Auguste Lumi�re directs four workers in the demolition of an '\n 'old wall at the Lumi�re factory. One worker is pressing the '\n 'wall inwards with a jackscrew',\n 'genres': ['\"Documentary'],\n 'languages': [' while another is pushing it with a ...\"'],\n 'plot': '',\n 'rated': '',\n 'title': \"D�molition d'un mur\",\n 'writers': ['Louis Lumi�re'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b02'),\n 'actors': ['Georges M�li�s'],\n 'countries': [' Mephistopheles conjures up a young girl and various '\n 'supernatural creatures'],\n 'directors': [''],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['\"A bat flies into an ancient castle and transforms itself '\n 'into Mephistopheles himself. Producing a cauldron'],\n 'plot': '1135',\n 'rated': '',\n 'title': 'The House of the Devil',\n 'writers': ['Georges M�li�s'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b03'),\n 'actors': ['\"Jeanne d\\'Alcy'],\n 'countries': [' a chair and small table. 
He brings a well-dressed women '\n 'through the door'],\n 'directors': ['Georges M�li�s'],\n 'fullPlot': 'A woman disappears on stage.',\n 'genres': ['Short'],\n 'languages': ['\"An elegantly dressed man enters through a stage door onto a '\n 'set with decorated back screen'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Conjuring of a Woman at the House of Robert Houdin',\n 'writers': [''],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b04'),\n 'actors': [''],\n 'countries': [' and a battle ensues in hilarious comic fashion.\"'],\n 'directors': [''],\n 'fullPlot': '\"A man tries to get a good night\\'s sleep',\n 'genres': ['\"Short'],\n 'languages': [' but is disturbed by a giant spider that leaps onto his bed'],\n 'plot': '',\n 'rated': '',\n 'title': 'A Terrible Night',\n 'writers': ['Georges M�li�s'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b05'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['Three friends are playing cards in a beer garden. One of them '\n 'orders drinks. The waitress comes back with a bottle of wine '\n 'and three glasses on a tray. The man serves his friends. They '\n '...'],\n 'directors': [''],\n 'fullPlot': '462',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': '5.1',\n 'rated': '',\n 'title': 'Une partie de cartes',\n 'writers': ['Georges M�li�s'],\n 'year': '1896'},\n {'_id': ObjectId('5c2d427893c58e295ca00b06'),\n 'actors': ['Georges M�li�s'],\n 'countries': [' candles ...\"'],\n 'directors': [''],\n 'fullPlot': '\"A weary traveler stops at an inn along the way to get a good '\n \"night's sleep\",\n 'genres': ['\"Short'],\n 'languages': [' but his rest is interrupted by odd happenings when he gets '\n 'to his room--beds vanishing and re-appearing'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Bewitched Inn',\n 'writers': ['Georges M�li�s'],\n 'year': '1897'},\n {'_id': ObjectId('5c2d427893c58e295ca00b07'),\n 'actors': ['George L. 
Du Maurier (novel)'],\n 'countries': [''],\n 'directors': [''],\n 'fullPlot': '\"Dancer Ella Lola dances a routine based on the famous '\n 'character of \"\"Trilby\"\".\"',\n 'genres': [''],\n 'languages': ['\"Dancer Ella Lola dances a routine based on the famous '\n 'character of \"\"Trilby\"\".\"'],\n 'plot': '',\n 'rated': '1898',\n 'title': '\"Ella Lola',\n 'writers': ['James H. White'],\n 'year': ' a la Trilby\"'},\n {'_id': ObjectId('5c2d427893c58e295ca00b08'),\n 'actors': ['Georges M�li�s (creator)'],\n 'countries': [' Satan appears and surprises the astronomer. At the command '\n 'of the Fairy ...\"'],\n 'directors': [''],\n 'fullPlot': '\"\"\"In the opening of this film is seen the astronomer intently '\n 'poring over his books. Suddenly',\n 'genres': ['\"Short'],\n 'languages': [' in a cloud of smoke'],\n 'plot': '',\n 'rated': '',\n 'title': 'A Trip to the Moon',\n 'writers': ['Georges M�li�s'],\n 'year': '1898'},\n {'_id': ObjectId('5c2d427893c58e295ca00b09'),\n 'actors': ['Charles Perrault (story)'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTgwMDY1MzM1NV5BMl5BanBnXkFtZTgwMjM1MzUwMzE@._V1_SX300.jpg'],\n 'directors': ['1899-12-25'],\n 'fullPlot': '6.6',\n 'genres': ['\"Drama'],\n 'languages': ['586'],\n 'plot': '',\n 'rated': '',\n 'title': 'Cinderella',\n 'writers': ['Georges M�li�s'],\n 'year': '1899'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0a'),\n 'actors': ['Georges M�li�s'],\n 'countries': [''],\n 'directors': ['1900-06-30'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': 'http://ia.media-imdb.com/images/M/MV5BMjM1NTQyNDQxOV5BMl5BanBnXkFtZTgwMTQ1MzUwMzE@._V1_SX300.jpg',\n 'rated': '',\n 'title': 'The Sign of the Cross',\n 'writers': ['Georges M�li�s'],\n 'year': '1899'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0b'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['France'],\n 'directors': ['Georges M�li�s'],\n 'fullPlot': '',\n 'genres': ['Short'],\n 'languages': [''],\n 'plot': '',\n 'rated': '',\n 
'title': 'A Turn of the Century Illusionist',\n 'writers': [''],\n 'year': '1899'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0c'),\n 'actors': ['Harold Smith'],\n 'countries': [' at a watch'],\n 'directors': ['George Albert Smith'],\n 'fullPlot': ' seen magnified.\"',\n 'genres': ['Short'],\n 'languages': ['\"A child borrows his grandmother\\'s magnifying glass to look '\n 'at a newspaper ad for Bovril'],\n 'plot': '\"A boy looks through glasses at various objects',\n 'rated': '',\n 'title': \"Grandma's Reading Glass\",\n 'writers': [''],\n 'year': '1900'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0d'),\n 'actors': [''],\n 'countries': [' then he wakes up in bed next to his wife.\"'],\n 'directors': ['1900-08-01'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['\"A man dreams he is flirting with an attractive young lady'],\n 'plot': '378',\n 'rated': '',\n 'title': 'Let Me Dream Again',\n 'writers': ['George Albert Smith'],\n 'year': '1900'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0e'),\n 'actors': [''],\n 'countries': [' he begins to blow with all his might. Immediately the ...\"'],\n 'directors': [''],\n 'fullPlot': '\"A chemist in his laboratory places upon a table his own head',\n 'genres': ['\"Short'],\n 'languages': [' alive; then fixing upon his head a rubber tube with a pair '\n 'of bellows'],\n 'plot': '',\n 'rated': '',\n 'title': 'The India Rubber Head',\n 'writers': ['Georges M�li�s'],\n 'year': '1901'},\n {'_id': ObjectId('5c2d427893c58e295ca00b0f'),\n 'actors': [''],\n 'countries': [' as they appear at night.\"'],\n 'directors': ['1901-11-01'],\n 'fullPlot': '\"A most perfect picture of the Pan-American Exposition '\n 'buildings',\n 'genres': ['\"Documentary'],\n 'languages': [' including the Electric Tower and Temple of Music'],\n 'plot': '',\n 'rated': '',\n 'title': 'Panorama of Esplanade by Night',\n 'writers': ['Edwin S. Porter'],\n 'year': '1901'},\n {'_id': ObjectId('5c2d427893c58e295ca00b10'),\n 'actors': [' Edwin S. 
Porter\"'],\n 'countries': ['\"Porter\\'s sequential continuity editing links several shots '\n 'to form a narrative of the famous fairy tale story of Jack '\n 'and his magic beanstalk. Borrowing on cinematographic methods '\n \"reminiscent of 'Georges Melies'\"],\n 'directors': ['1902-07-15'],\n 'fullPlot': 'http://ia.media-imdb.com/images/M/MV5BMjAzNTI3MzI0Nl5BMl5BanBnXkFtZTcwMzQ1MTYzMw@@._V1_SX300.jpg',\n 'genres': ['\"Short'],\n 'languages': [\"Porter's sequential continuity editing links several shots to \"\n 'form a narrative of the famous fairy tale story of Jack and '\n 'his magic beanstalk. Borrowing on cinematographic methods '\n '...'],\n 'plot': '442',\n 'rated': '',\n 'title': 'Jack and the Beanstalk',\n 'writers': ['\"George S. Fleming'],\n 'year': '1902'},\n {'_id': ObjectId('5c2d427893c58e295ca00b11'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['A group of astronomers go on an expedition to the moon.'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '23904',\n 'genres': ['\"Short'],\n 'languages': ['http://ia.media-imdb.com/images/M/MV5BMTQzMDYxNzUxNl5BMl5BanBnXkFtZTgwMjgxNjkxMTE@._V1_SX300.jpg'],\n 'plot': '8.2',\n 'rated': 'TV-G',\n 'title': 'A Trip to the Moon',\n 'writers': ['1902-10-04'],\n 'year': '1902'},\n {'_id': ObjectId('5c2d427893c58e295ca00b12'),\n 'actors': [' Percy Stow\"'],\n 'countries': ['6.3'],\n 'directors': ['1903-10-17'],\n 'fullPlot': ' Geoffrey Faithfull\"',\n 'genres': ['\"Fantasy'],\n 'languages': [''],\n 'plot': ' Blair',\n 'rated': '',\n 'title': 'Alice in Wonderland',\n 'writers': ['\"Cecil M. Hepworth'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b13'),\n 'actors': [' Edwin S. 
Porter\"'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjAzNTI3MzI0Nl5BMl5BanBnXkFtZTcwMzQ1MTYzMw@@._V1_SX300.jpg'],\n 'directors': ['1903-01-01'],\n 'fullPlot': '6.4',\n 'genres': ['\"Short'],\n 'languages': ['1158'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'Life of an American Fireman',\n 'writers': ['\"George S. Fleming'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b14'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTQ4NDE5MDcyNF5BMl5BanBnXkFtZTgwNDU3Njk5MTE@._V1_SX300.jpg'],\n 'directors': ['1903-12-01'],\n 'fullPlot': '7.4',\n 'genres': ['\"Short'],\n 'languages': ['9847'],\n 'plot': '',\n 'rated': 'TV-G',\n 'title': 'The Great Train Robbery',\n 'writers': ['Edwin S. Porter'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b15'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['\"A marching band appears'],\n 'directors': [' Music\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['The leader of a marching band demonstrates an unusual way of '\n 'writing music.'],\n 'plot': '1121',\n 'rated': '',\n 'title': 'The Music Lover',\n 'writers': ['1903-08-15'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b16'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjM3MTIwOTU0NV5BMl5BanBnXkFtZTgwNDM1MzUwMzE@._V1_SX300.jpg'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '7.3',\n 'genres': ['\"Short'],\n 'languages': ['574'],\n 'plot': '',\n 'rated': '',\n 'title': 'Fairyland: A Kingdom of Fairies',\n 'writers': ['1903-09-05'],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b17'),\n 'actors': ['George Albert Smith'],\n 'countries': ['\"A simple scene of two rather flamboyantly-dressed Edwardian '\n 'children attempting to feed a spoonful of medicine to a sick '\n 'kitten. 
The film is important for being one of the earliest '\n 'films to cut to a close-up'],\n 'directors': [' Family\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['A girl gives a spoonful of medicine to a kitten.'],\n 'plot': '468',\n 'rated': '',\n 'title': 'The Sick Kitten',\n 'writers': [''],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b18'),\n 'actors': [''],\n 'countries': ['France'],\n 'directors': ['Alice Guy'],\n 'fullPlot': '',\n 'genres': ['Short'],\n 'languages': [''],\n 'plot': '',\n 'rated': '',\n 'title': 'Faust et M�phistoph�l�s',\n 'writers': [''],\n 'year': '1903'},\n {'_id': ObjectId('5c2d427893c58e295ca00b19'),\n 'actors': ['\"Lewin Fitzhamon'],\n 'countries': ['6.7'],\n 'directors': [' Family\"'],\n 'fullPlot': ' Cecil M. Hepworth\"',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': ' Barbara Hepworth',\n 'rated': '',\n 'title': 'Rescued by Rover',\n 'writers': ['1905-08-19'],\n 'year': '1905'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1a'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTYxNjExMzk5Nl5BMl5BanBnXkFtZTgwMzE5MjAwMzE@._V1_SX300.jpg'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '7.7',\n 'genres': ['\"Short'],\n 'languages': ['2022'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Voyage Across the Impossible',\n 'writers': ['1904-10-01'],\n 'year': '1904'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1b'),\n 'actors': ['Harold M. 
Shaw'],\n 'countries': ['448'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['7.1'],\n 'plot': ' Ethel Jewett\"',\n 'rated': 'UNRATED',\n 'title': 'The Land Beyond the Sunset',\n 'writers': ['1912-10-28'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1c'),\n 'actors': [''],\n 'countries': ['\"According to the rapid strides that electricity is making in '\n 'this wonderful age we are not surprised to see in this '\n 'picture an ideal hotel of the future in which everything is '\n 'done by electricity. We see a couple entering the hostelry '\n 'and'],\n 'directors': ['1908-12-19'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['According to the rapid strides that electricity is making in '\n 'this wonderful age we are not surprised to see in this '\n 'picture an ideal hotel of the future in which everything is '\n 'done by ...'],\n 'plot': '481',\n 'rated': '',\n 'title': 'The Electric Hotel',\n 'writers': ['Segundo de Chom�n'],\n 'year': '1908'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1d'),\n 'actors': [''],\n 'countries': [' following the path of a subway train as it makes its way '\n 'through New York City subway tunnels on its journey to the '\n 'old ...\"'],\n 'directors': ['1905-06-05'],\n 'fullPlot': '\"Starting at Union Square',\n 'genres': ['\"Short'],\n 'languages': [' we are taken for an underground excursion'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'New York Subway',\n 'writers': ['G.W. Bitzer'],\n 'year': '1905'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1e'),\n 'actors': ['Walter R. Booth'],\n 'countries': ['\"A magical glowing white motorcar dismembers policemen'],\n 'directors': [' Comedy\"'],\n 'fullPlot': '',\n 'genres': ['\"Fantasy'],\n 'languages': ['A British trick film in which a motorist ends up driving '\n 'around the rings of Saturn.'],\n 'plot': '467',\n 'rated': '',\n 'title': \"The '?' 
Motorist\",\n 'writers': ['1906-10-01'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b1f'),\n 'actors': [' Edwin S. Porter\"'],\n 'countries': ['\"Adapted from Winsor McCay\\'s films and comics of the period'],\n 'directors': ['1906-02-01'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['The fiend faces the spectacular mind-bending consequences of '\n 'his free-wheeling rarebit binge.'],\n 'plot': '1082',\n 'rated': '',\n 'title': 'Dream of a Rarebit Fiend',\n 'writers': ['\"Wallace McCutcheon'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b20'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1972-07-01'],\n 'fullPlot': 'Two travellers are tormented by Satan from inn to inn and '\n 'eventuly experience a buggy ride through the heavens courtesy '\n 'of the Devil before he takes one of them down to hell and '\n 'roasts him ...',\n 'genres': ['\"Short'],\n 'languages': ['Two travellers are tormented by Satan from inn to inn and '\n 'eventuly experience a buggy ride through the heavens courtesy '\n 'of the Devil before he takes one of them down to hell and '\n 'roasts him on a spit.'],\n 'plot': 'http://ia.media-imdb.com/images/M/MV5BOTc4NDU2NTA0N15BMl5BanBnXkFtZTgwNDQ1MzUwMzE@._V1_SX300.jpg',\n 'rated': '',\n 'title': 'The 400 Tricks of the Devil',\n 'writers': ['Georges M�li�s'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b21'),\n 'actors': ['J. 
Stuart Blackton'],\n 'countries': ['\"Considered the first truly animated movie (or at least the '\n 'first verifiable'],\n 'directors': [' Comedy\"'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['A cartoonist draws faces and figures on a blackboard - and '\n 'they come to life.'],\n 'plot': '625',\n 'rated': '',\n 'title': 'Humorous Phases of Funny Faces',\n 'writers': ['1906-04-06'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b22'),\n 'actors': ['Charles Tait'],\n 'countries': ['285'],\n 'directors': [' Drama\"'],\n 'fullPlot': '',\n 'genres': ['\"Biography'],\n 'languages': ['6.3'],\n 'plot': ' Bella Cola\"',\n 'rated': '',\n 'title': 'The Story of the Kelly Gang',\n 'writers': ['1906-12-26'],\n 'year': '1906'},\n {'_id': ObjectId('5c2d427893c58e295ca00b23'),\n 'actors': [' Ferdinand Zecca\"'],\n 'countries': [' but is confronted by a Good Spirit who opposes him.\"'],\n 'directors': ['1907-08-17'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['\"A demonic magician attempts to perform his act in a strange '\n 'grotto'],\n 'plot': '442',\n 'rated': '',\n 'title': 'The Red Spectre',\n 'writers': ['\"Segundo de Chom�n'],\n 'year': '1907'},\n {'_id': ObjectId('5c2d427893c58e295ca00b24'),\n 'actors': ['�mile Cohl'],\n 'countries': [' a series of scenes without much narrative structure'],\n 'directors': [' Fantasy\"'],\n 'fullPlot': '',\n 'genres': ['\"Animation'],\n 'languages': ['\"The first all-animated film in history'],\n 'plot': '1245',\n 'rated': '',\n 'title': 'A Fantasy',\n 'writers': ['1908-08-17'],\n 'year': '1908'},\n {'_id': ObjectId('5c2d427893c58e295ca00b25'),\n 'actors': [''],\n 'countries': ['\"http://ia.media-imdb.com/images/M/MV5BNjg3MTI3ODI2N15BMl5BanBnXkFtZTcwMTg1MDA4Mg@@._V1._CR64'],\n 'directors': ['1909-07-08'],\n 'fullPlot': '6.6',\n 'genres': ['\"Short'],\n 'languages': ['294'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'The Country Doctor',\n 'writers': ['D.W. 
Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b26'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1909-12-13'],\n 'fullPlot': '6.6',\n 'genres': ['\"Short'],\n 'languages': ['1375'],\n 'plot': '',\n 'rated': 'G',\n 'title': 'A Corner in Wheat',\n 'writers': ['D.W. Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b27'),\n 'actors': ['D.W. Griffith'],\n 'countries': [''],\n 'directors': [' Drama\"'],\n 'fullPlot': ' Mary Pickford',\n 'genres': ['\"Short'],\n 'languages': [' Gladys Egan\"'],\n 'plot': ' Marion Leonard',\n 'rated': '',\n 'title': 'The Lonely Villa',\n 'writers': ['1909-06-10'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b28'),\n 'actors': [''],\n 'countries': ['A king exacts vengeance upon his faithless mistress and her '\n 'lover.'],\n 'directors': ['1909-09-02'],\n 'fullPlot': '428',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': '6.1',\n 'rated': '',\n 'title': 'The Sealed Room',\n 'writers': ['D.W. Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b29'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1909-01-25'],\n 'fullPlot': '6.3',\n 'genres': ['\"Comedy'],\n 'languages': ['626'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'Those Awful Hats',\n 'writers': ['D.W. Griffith'],\n 'year': '1909'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2a'),\n 'actors': ['Urban Gad'],\n 'countries': [''],\n 'directors': ['1912-04-18'],\n 'fullPlot': '6.6',\n 'genres': ['\"Short'],\n 'languages': ['429'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Woman Always Pays',\n 'writers': ['Urban Gad'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2b'),\n 'actors': ['J. 
Searle Dawley'],\n 'countries': ['2149'],\n 'directors': [' Horror\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.5'],\n 'plot': ' Augustus Phillips\"',\n 'rated': 'UNRATED',\n 'title': 'Frankenstein',\n 'writers': ['1910-03-18'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2c'),\n 'actors': ['D.W. Griffith'],\n 'countries': [''],\n 'directors': [' War\"'],\n 'fullPlot': '6.5',\n 'genres': ['\"Short'],\n 'languages': ['331'],\n 'plot': '',\n 'rated': 'NOT RATED',\n 'title': 'In the Border States',\n 'writers': ['1910-06-13'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2d'),\n 'actors': ['Charles Kingsley (poem)'],\n 'countries': [''],\n 'directors': ['1910-05-05'],\n 'fullPlot': '6.5',\n 'genres': ['\"Drama'],\n 'languages': ['343'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Unchanging Sea',\n 'writers': ['D.W. Griffith'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2e'),\n 'actors': ['Otis Turner'],\n 'countries': ['5.7'],\n 'directors': [' Short\"'],\n 'fullPlot': ' Robert Z. Leonard\"',\n 'genres': ['\"Adventure'],\n 'languages': [''],\n 'plot': ' Eugenie Besserer',\n 'rated': 'NOT RATED',\n 'title': 'The Wonderful Wizard of Oz',\n 'writers': ['1910-03-24'],\n 'year': '1910'},\n {'_id': ObjectId('5c2d427893c58e295ca00b2f'),\n 'actors': ['Wladyslaw Starewicz'],\n 'countries': [' and stars very realistic ...\"'],\n 'directors': [' Comedy\"'],\n 'fullPlot': 'http://ia.media-imdb.com/images/M/MV5BMTczODYxMzI5OV5BMl5BanBnXkFtZTgwNzE4NDU0MjE@._V1_SX300.jpg',\n 'genres': ['\"Animation'],\n 'languages': ['\"A jilted husband takes his revenge by filming his wife and '\n 'her lover and showing the result at the local cinema. 
This '\n \"was one of Starewicz' first animated films\"],\n 'plot': '1593',\n 'rated': '',\n 'title': 'The Revenge of a Kinematograph Cameraman',\n 'writers': ['1912-10-27'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b30'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1911-09-12'],\n 'fullPlot': '5.7',\n 'genres': ['\"Comedy'],\n 'languages': ['135'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'Her Crowning Glory',\n 'writers': ['Laurence Trimble'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b31'),\n 'actors': ['D.W. Griffith'],\n 'countries': ['662'],\n 'directors': [' Romance\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.6'],\n 'plot': ' Edward Dillon\"',\n 'rated': 'UNRATED',\n 'title': 'The Lonedale Operator',\n 'writers': ['1911-03-23'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b32'),\n 'actors': ['1911-04-08'],\n 'countries': ['1034'],\n 'directors': [' Short'],\n 'fullPlot': '',\n 'genres': ['7 min'],\n 'languages': ['7.3'],\n 'plot': 'Winsor McCay',\n 'rated': '1911',\n 'title': '\"Winsor McCay',\n 'writers': [' Comedy\"'],\n 'year': ' the Famous Cartoonist of the N.Y. Herald and His Moving Comics\"'},\n {'_id': ObjectId('5c2d427893c58e295ca00b33'),\n 'actors': ['D.W. Griffith'],\n 'countries': ['149'],\n 'directors': [' Drama\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.4'],\n 'plot': ' Donald Crisp\"',\n 'rated': '',\n 'title': \"The Miser's Heart\",\n 'writers': ['1911-11-20'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b34'),\n 'actors': ['George Hennessy'],\n 'countries': [''],\n 'directors': ['1912-05-27'],\n 'fullPlot': '5.6',\n 'genres': ['\"Short'],\n 'languages': ['75'],\n 'plot': '',\n 'rated': '',\n 'title': 'A Beast at Bay',\n 'writers': ['D.W. 
Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b35'),\n 'actors': ['Victorien Sardou (adapted from the play by)'],\n 'countries': [''],\n 'directors': ['1912-11-13'],\n 'fullPlot': '5.1',\n 'genres': ['\"Drama'],\n 'languages': ['291'],\n 'plot': '',\n 'rated': 'UNRATED',\n 'title': 'Cleopatra',\n 'writers': ['Charles L. Gaskill'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b36'),\n 'actors': ['Georges M�li�s'],\n 'countries': ['458'],\n 'directors': [' Sci-Fi\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.9'],\n 'plot': ' Fernande Albany\"',\n 'rated': '',\n 'title': 'The Conquest of the Pole',\n 'writers': [''],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b37'),\n 'actors': ['Lionel Barrymore'],\n 'countries': [''],\n 'directors': ['1912-12-16'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['210'],\n 'plot': '',\n 'rated': '',\n 'title': \"The Burglar's Dilemma\",\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b38'),\n 'actors': ['\"Francesco Bertolini'],\n 'countries': [''],\n 'directors': [' Fantasy\"'],\n 'fullPlot': ' Giuseppe de Liguoro',\n 'genres': ['\"Adventure'],\n 'languages': [' Pier Delle Vigne\"'],\n 'plot': ' Arturo Pirovano',\n 'rated': '',\n 'title': \"Dante's Inferno\",\n 'writers': ['1911-07-01'],\n 'year': '1911'},\n {'_id': ObjectId('5c2d427893c58e295ca00b39'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1912-02-15'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['103'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Mender of Nets',\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3a'),\n 'actors': ['D.W. 
Griffith'],\n 'countries': ['1226'],\n 'directors': [' Drama\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.7'],\n 'plot': ' Walter Miller\"',\n 'rated': '',\n 'title': 'The Musketeers of Pig Alley',\n 'writers': ['1912-10-31'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3b'),\n 'actors': ['\"Anita Loos'],\n 'countries': ['468'],\n 'directors': ['1912-12-05'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.5'],\n 'plot': ' Lionel Barrymore\"',\n 'rated': '',\n 'title': 'The New York Hat',\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3c'),\n 'actors': [''],\n 'countries': [''],\n 'directors': ['1912-10-24'],\n 'fullPlot': '6',\n 'genres': ['\"Short'],\n 'languages': ['306'],\n 'plot': '',\n 'rated': '',\n 'title': 'The Painted Lady',\n 'writers': ['D.W. Griffith'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3d'),\n 'actors': ['\"James Keane'],\n 'countries': ['173'],\n 'directors': ['\"Andr� Calmettes'],\n 'fullPlot': '',\n 'genres': ['Drama'],\n 'languages': ['5.6'],\n 'plot': ' James Keane\"',\n 'rated': '',\n 'title': 'The Life and Death of King Richard III',\n 'writers': [' James Keane\"'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3e'),\n 'actors': ['\"Victor Sj�str�m'],\n 'countries': [''],\n 'directors': ['Victor Sj�str�m'],\n 'fullPlot': '94',\n 'genres': ['Drama'],\n 'languages': [''],\n 'plot': '6',\n 'rated': '',\n 'title': 'Tr�dg�rdsm�staren',\n 'writers': ['Mauritz Stiller'],\n 'year': '1912'},\n {'_id': ObjectId('5c2d427893c58e295ca00b3f'),\n 'actors': ['D.W. 
Griffith'],\n 'countries': ['336'],\n 'directors': [' Romance\"'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['6.2'],\n 'plot': ' Lionel Barrymore\"',\n 'rated': '',\n 'title': \"Death's Marathon\",\n 'writers': ['1913-06-14'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b40'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.8'],\n 'directors': [''],\n 'fullPlot': ' Ren�e Carl\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'Fantomas',\n 'writers': ['Louis Feuillade'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b41'),\n 'actors': ['Jere F. Looney'],\n 'countries': [''],\n 'directors': ['1913-05-10'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['175'],\n 'plot': '',\n 'rated': '',\n 'title': 'The House of Darkness',\n 'writers': ['D.W. Griffith'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b42'),\n 'actors': [' Victor Sj�str�m\"'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTI5MjYzMTY3Ml5BMl5BanBnXkFtZTcwMzY1NDE2Mw@@._V1_SX300.jpg'],\n 'directors': ['Victor Sj�str�m'],\n 'fullPlot': '7',\n 'genres': ['Drama'],\n 'languages': ['493'],\n 'plot': '',\n 'rated': '',\n 'title': 'Ingeborg Holm',\n 'writers': ['\"Nils Krok (play)'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b43'),\n 'actors': ['\"Frank Beal (story)'],\n 'countries': ['37'],\n 'directors': ['1913-12-08'],\n 'fullPlot': '',\n 'genres': ['\"Short'],\n 'languages': ['5.7'],\n 'plot': ' Ninita Bristow\"',\n 'rated': '',\n 'title': 'The Inside of the White Slave Traffic',\n 'writers': ['Frank Beal'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b44'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.6'],\n 'directors': ['1913-10-02'],\n 'fullPlot': ' Ren�e Carl\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'Juve Against Fantomas',\n 'writers': 
['Louis Feuillade'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b45'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.7'],\n 'directors': ['1914-03-01'],\n 'fullPlot': ' Luitz-Morat\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'The Dead Man Who Killed',\n 'writers': ['Louis Feuillade'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b46'),\n 'actors': ['Stellan Rye'],\n 'countries': [''],\n 'directors': [' Horror\"'],\n 'fullPlot': ' Grete Berger',\n 'genres': ['\"Drama'],\n 'languages': [' Lyda Salmonova\"'],\n 'plot': ' John Gottowt',\n 'rated': '',\n 'title': 'The Student of Prague',\n 'writers': ['1913-09-01'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b47'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTU2NDg2ODkxMV5BMl5BanBnXkFtZTcwOTQzNzAzMg@@._V1_SX300.jpg'],\n 'directors': ['1913-11-24'],\n 'fullPlot': '6',\n 'genres': ['\"Crime'],\n 'languages': ['371'],\n 'plot': '',\n 'rated': 'TV-PG',\n 'title': 'Traffic in Souls',\n 'writers': ['George Loane Tucker'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b48'),\n 'actors': [' Eleuterio Rodolfi\"'],\n 'countries': ['6.1'],\n 'directors': ['1913-08-13'],\n 'fullPlot': ' Antonio Grisanti\"',\n 'genres': ['\"Adventure'],\n 'languages': [''],\n 'plot': ' Ubaldo Stefani',\n 'rated': '',\n 'title': 'The Last Days of Pompeii',\n 'writers': ['\"Mario Caserini'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b49'),\n 'actors': ['\"Edgar Allan Poe (stories)'],\n 'countries': ['788'],\n 'directors': ['1914-08-24'],\n 'fullPlot': '',\n 'genres': ['\"Crime'],\n 'languages': ['7'],\n 'plot': ' George Siegmann\"',\n 'rated': 'NOT RATED',\n 'title': \"The Avenging Conscience: or 'Thou Shalt Not Kill'\",\n 'writers': ['D.W. Griffith'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4a'),\n 'actors': ['D.W. 
Griffith'],\n 'countries': ['6.4'],\n 'directors': [' Western\"'],\n 'fullPlot': ' Robert Harron\"',\n 'genres': ['\"Short'],\n 'languages': [''],\n 'plot': ' Alfred Paget',\n 'rated': '',\n 'title': 'The Battle at Elderbush Gulch',\n 'writers': ['1913-12-01'],\n 'year': '1913'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4b'),\n 'actors': ['Giovanni Pastrone'],\n 'countries': [' Dante Testa\"'],\n 'directors': [' History\"'],\n 'fullPlot': ' Lidia Quaranta',\n 'genres': ['\"Adventure'],\n 'languages': [' Gina Marangoni'],\n 'plot': '\"Carolina Catena',\n 'rated': 'NOT RATED',\n 'title': 'Cabiria',\n 'writers': ['1914-06-01'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4c'),\n 'actors': ['Charles Perrault (story)'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTcxODcyNjAwN15BMl5BanBnXkFtZTcwMTc1MzI1Mw@@._V1_SX300.jpg'],\n 'directors': ['1914-12-28'],\n 'fullPlot': '6.9',\n 'genres': ['\"Fantasy'],\n 'languages': ['753'],\n 'plot': '',\n 'rated': '',\n 'title': 'Cinderella',\n 'writers': ['James Kirkwood'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4d'),\n 'actors': [' Louis Feuillade'],\n 'countries': ['770'],\n 'directors': ['Louis Feuillade'],\n 'fullPlot': '',\n 'genres': ['Drama'],\n 'languages': ['6.6'],\n 'plot': ' Ren�e Carl\"',\n 'rated': '',\n 'title': 'Fantomas Against Fantomas',\n 'writers': ['\"Marcel Allain (novel)'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4e'),\n 'actors': ['\"Marcel Allain (novel)'],\n 'countries': ['6.5'],\n 'directors': ['1914-07-10'],\n 'fullPlot': ' Mesnery\"',\n 'genres': ['\"Crime'],\n 'languages': [''],\n 'plot': ' Georges Melchior',\n 'rated': '',\n 'title': 'Le faux magistrat',\n 'writers': ['Louis Feuillade'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b4f'),\n 'actors': ['Winsor McCay'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMTQxNzI4ODQ3NF5BMl5BanBnXkFtZTgwNzY5NzMwMjE@._V1_SX300.jpg'],\n 'directors': [' Comedy\"'],\n 
'fullPlot': '7.3',\n 'genres': ['\"Animation'],\n 'languages': ['1837'],\n 'plot': '',\n 'rated': '',\n 'title': 'Gertie the Dinosaur',\n 'writers': ['1914-09-15'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b50'),\n 'actors': ['\"Benjamin Christensen'],\n 'countries': ['258'],\n 'directors': ['1914-04-01'],\n 'fullPlot': '',\n 'genres': ['\"Drama'],\n 'languages': ['6.7'],\n 'plot': ' Fritz Lamprecht\"',\n 'rated': '',\n 'title': 'The Mysterious X',\n 'writers': ['Benjamin Christensen'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b51'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BOTE1MjM3NjI1N15BMl5BanBnXkFtZTgwODc5NDcwNDE@._V1_SX300.jpg'],\n 'directors': ['1914-11-07'],\n 'fullPlot': '6',\n 'genres': ['\"Short'],\n 'languages': ['483'],\n 'plot': '',\n 'rated': '',\n 'title': 'His Musical Career',\n 'writers': ['Charles Chaplin'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b52'),\n 'actors': [''],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjAxMjE2ODMwNF5BMl5BanBnXkFtZTgwMTAwMTAyMjE@._V1_SX300.jpg'],\n 'directors': ['1914-08-31'],\n 'fullPlot': '6.1',\n 'genres': ['\"Short'],\n 'languages': ['673'],\n 'plot': '',\n 'rated': '',\n 'title': 'His New Profession',\n 'writers': ['Charles Chaplin'],\n 'year': '1914'},\n {'_id': ObjectId('5c2d427893c58e295ca00b53'),\n 'actors': ['\"Courtenay Foote'],\n 'countries': ['92'],\n 'directors': ['Lois Weber'],\n 'fullPlot': '257',\n 'genres': ['Drama'],\n 'languages': ['\"http://ia.media-imdb.com/images/M/MV5BMjA5NTYzNDQ4NV5BMl5BanBnXkFtZTgwMzA5NzgwMzE@._V1._CR1'],\n 'plot': '5.9',\n 'rated': 'PASSED',\n 'title': 'Hypocrites',\n 'writers': ['Lois Weber'],\n 'year': '1915'},\n {'_id': ObjectId('5c2d427893c58e295ca00b54'),\n 'actors': ['Edward S. 
Curtis (story)'],\n 'countries': ['http://ia.media-imdb.com/images/M/MV5BMjE3ODk0NTAwNF5BMl5BanBnXkFtZTcwNjU2MzYyMQ@@._V1_SX300.jpg'],\n 'directors': ['1914-12-07'],\n 'fullPlot': '5.8',\n 'genres': ['\"Drama'],\n 'languages': ['223'],\n" ], [ "client.mflix.movies_initial.find()[0]", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code" ] ]
4aa268119ae530526e5192d70f4341d80e25c57a
869,063
ipynb
Jupyter Notebook
ganti_background_foto.ipynb
brdx88/cv_udacity_notebook
0379a5a49df80f536f9819db9852f45ec05ed0d9
[ "Apache-2.0" ]
null
null
null
ganti_background_foto.ipynb
brdx88/cv_udacity_notebook
0379a5a49df80f536f9819db9852f45ec05ed0d9
[ "Apache-2.0" ]
null
null
null
ganti_background_foto.ipynb
brdx88/cv_udacity_notebook
0379a5a49df80f536f9819db9852f45ec05ed0d9
[ "Apache-2.0" ]
null
null
null
2,571.192308
216,932
0.96219
[ [ [ "import matplotlib.pyplot as plt\nimport numpy as np\nimport cv2", "_____no_output_____" ], [ "image = cv2.imread('atta.jpeg')\nimage = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\nprint(f\"This image's dimension: {image.shape}\")\nplt.imshow(image)\nplt.style.use('default')", "This image's dimension: (327, 581, 3)\n" ], [ "image_copy = np.copy(image)\nplt.imshow(image_copy)", "_____no_output_____" ], [ "# define the color threshold\n\nlower_blue = np.array([0,0,200])\nupper_blue = np.array([100,100,255])", "_____no_output_____" ], [ "# create a mask, select area of interest\n\nmask = cv2.inRange(image_copy, lower_blue, upper_blue)\nplt.imshow(mask, cmap = 'gray')", "_____no_output_____" ], [ "# mask the image to let the object show through\n\nmasked_image = np.copy(image_copy)\nmasked_image[mask != 0] = [0,0,0]\nplt.imshow(masked_image)", "_____no_output_____" ], [ "# mask and add a background image\n\nbackground_image = cv2.imread('red.jpg')\nbackground_image = cv2.cvtColor(background_image, cv2.COLOR_BGR2RGB)\n\ncrop_background = background_image[0:327, 0:581]\ncrop_background[mask == 0] = [0,0,0]\nplt.imshow(crop_background)", "_____no_output_____" ], [ "# create a complete image\n\ncomplete_image = masked_image + crop_background\n\nplt.imshow(complete_image)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa279350e538d779069fcab3a121368343c2d34
35,278
ipynb
Jupyter Notebook
Course 5 - Sequence Models/NoteBooks/Operations_on_word_vectors_v2a.ipynb
HarshitRuwali/Coursera-Deep-Learning-Specialization
8038f2f2d746ad455e0e3c45c736c5d9b7348d8a
[ "MIT" ]
null
null
null
Course 5 - Sequence Models/NoteBooks/Operations_on_word_vectors_v2a.ipynb
HarshitRuwali/Coursera-Deep-Learning-Specialization
8038f2f2d746ad455e0e3c45c736c5d9b7348d8a
[ "MIT" ]
null
null
null
Course 5 - Sequence Models/NoteBooks/Operations_on_word_vectors_v2a.ipynb
HarshitRuwali/Coursera-Deep-Learning-Specialization
8038f2f2d746ad455e0e3c45c736c5d9b7348d8a
[ "MIT" ]
null
null
null
39.68279
608
0.561823
[ [ [ "# Operations on word vectors\n\nWelcome to your first assignment of this week! \n\nBecause word embeddings are very computationally expensive to train, most ML practitioners will load a pre-trained set of embeddings. \n\n**After this assignment you will be able to:**\n\n- Load pre-trained word vectors, and measure similarity using cosine similarity\n- Use word embeddings to solve word analogy problems such as Man is to Woman as King is to ______. \n- Modify word embeddings to reduce their gender bias \n\n", "_____no_output_____" ], [ "## <font color='darkblue'>Updates</font>\n\n#### If you were working on the notebook before this update...\n* The current notebook is version \"2a\".\n* You can find your original work saved in the notebook with the previous version name (\"v2\") \n* To view the file directory, go to the menu \"File->Open\", and this will open a new tab that shows the file directory.\n\n#### List of updates\n* cosine_similarity\n * Additional hints.\n* complete_analogy\n * Replaces the list of input words with a set, and sets it outside the for loop (to follow best practices in coding).\n* Spelling, grammar and wording corrections.", "_____no_output_____" ], [ "Let's get started! Run the following cell to load the packages you will need.", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom w2v_utils import *", "Using TensorFlow backend.\n" ] ], [ [ "#### Load the word vectors\n* For this assignment, we will use 50-dimensional GloVe vectors to represent words. \n* Run the following cell to load the `word_to_vec_map`. 
", "_____no_output_____" ] ], [ [ "words, word_to_vec_map = read_glove_vecs('../../readonly/glove.6B.50d.txt')", "_____no_output_____" ] ], [ [ "You've loaded:\n- `words`: set of words in the vocabulary.\n- `word_to_vec_map`: dictionary mapping words to their GloVe vector representation.\n\n#### Embedding vectors versus one-hot vectors\n* Recall from the lesson videos that one-hot vectors do not do a good job of capturing the level of similarity between words (every one-hot vector has the same Euclidean distance from any other one-hot vector).\n* Embedding vectors such as GloVe vectors provide much more useful information about the meaning of individual words. \n* Lets now see how you can use GloVe vectors to measure the similarity between two words. ", "_____no_output_____" ], [ "# 1 - Cosine similarity\n\nTo measure the similarity between two words, we need a way to measure the degree of similarity between two embedding vectors for the two words. Given two vectors $u$ and $v$, cosine similarity is defined as follows: \n\n$$\\text{CosineSimilarity(u, v)} = \\frac {u \\cdot v} {||u||_2 ||v||_2} = cos(\\theta) \\tag{1}$$\n\n* $u \\cdot v$ is the dot product (or inner product) of two vectors\n* $||u||_2$ is the norm (or length) of the vector $u$\n* $\\theta$ is the angle between $u$ and $v$. \n* The cosine similarity depends on the angle between $u$ and $v$. \n * If $u$ and $v$ are very similar, their cosine similarity will be close to 1.\n * If they are dissimilar, the cosine similarity will take a smaller value. 
\n\n<img src=\"images/cosine_sim.png\" style=\"width:800px;height:250px;\">\n<caption><center> **Figure 1**: The cosine of the angle between two vectors is a measure their similarity</center></caption>\n\n**Exercise**: Implement the function `cosine_similarity()` to evaluate the similarity between word vectors.\n\n**Reminder**: The norm of $u$ is defined as $ ||u||_2 = \\sqrt{\\sum_{i=1}^{n} u_i^2}$\n\n#### Additional Hints\n* You may find `np.dot`, `np.sum`, or `np.sqrt` useful depending upon the implementation that you choose.", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: cosine_similarity\n\ndef cosine_similarity(u, v):\n \"\"\"\n Cosine similarity reflects the degree of similarity between u and v\n \n Arguments:\n u -- a word vector of shape (n,) \n v -- a word vector of shape (n,)\n\n Returns:\n cosine_similarity -- the cosine similarity between u and v defined by the formula above.\n \"\"\"\n \n distance = 0.0\n \n ### START CODE HERE ###\n # Compute the dot product between u and v (≈1 line)\n dot = np.dot(u, v)\n # Compute the L2 norm of u (≈1 line)\n norm_u = np.linalg.norm(u)\n # Compute the L2 norm of v (≈1 line)\n norm_v = np.linalg.norm(v)\n # Compute the cosine similarity defined by formula (1) (≈1 line)\n cosine_similarity = dot / (norm_u * norm_v)\n ### END CODE HERE ###\n \n return cosine_similarity", "_____no_output_____" ], [ "father = word_to_vec_map[\"father\"]\nmother = word_to_vec_map[\"mother\"]\nball = word_to_vec_map[\"ball\"]\ncrocodile = word_to_vec_map[\"crocodile\"]\nfrance = word_to_vec_map[\"france\"]\nitaly = word_to_vec_map[\"italy\"]\nparis = word_to_vec_map[\"paris\"]\nrome = word_to_vec_map[\"rome\"]\n\nprint(\"cosine_similarity(father, mother) = \", cosine_similarity(father, mother))\nprint(\"cosine_similarity(ball, crocodile) = \",cosine_similarity(ball, crocodile))\nprint(\"cosine_similarity(france - paris, rome - italy) = \",cosine_similarity(france - paris, rome - italy))", "cosine_similarity(father, mother) = 
0.890903844289\ncosine_similarity(ball, crocodile) = 0.274392462614\ncosine_similarity(france - paris, rome - italy) = -0.675147930817\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **cosine_similarity(father, mother)** =\n </td>\n <td>\n 0.890903844289\n </td>\n </tr>\n <tr>\n <td>\n **cosine_similarity(ball, crocodile)** =\n </td>\n <td>\n 0.274392462614\n </td>\n </tr>\n <tr>\n <td>\n **cosine_similarity(france - paris, rome - italy)** =\n </td>\n <td>\n -0.675147930817\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "#### Try different words!\n* After you get the correct expected output, please feel free to modify the inputs and measure the cosine similarity between other pairs of words! \n* Playing around with the cosine similarity of other inputs will give you a better sense of how word vectors behave.", "_____no_output_____" ], [ "## 2 - Word analogy task\n\n* In the word analogy task, we complete the sentence: \n <font color='brown'>\"*a* is to *b* as *c* is to **____**\"</font>. \n\n* An example is: \n <font color='brown'> '*man* is to *woman* as *king* is to *queen*' </font>. \n\n* We are trying to find a word *d*, such that the associated word vectors $e_a, e_b, e_c, e_d$ are related in the following manner: \n $e_b - e_a \\approx e_d - e_c$\n* We will measure the similarity between $e_b - e_a$ and $e_d - e_c$ using cosine similarity. \n\n**Exercise**: Complete the code below to be able to perform word analogies!", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: complete_analogy\n\ndef complete_analogy(word_a, word_b, word_c, word_to_vec_map):\n \"\"\"\n Performs the word analogy task as explained above: a is to b as c is to ____. \n \n Arguments:\n word_a -- a word, string\n word_b -- a word, string\n word_c -- a word, string\n word_to_vec_map -- dictionary that maps words to their corresponding vectors. 
\n \n Returns:\n best_word -- the word such that v_b - v_a is close to v_best_word - v_c, as measured by cosine similarity\n \"\"\"\n \n # convert words to lowercase\n word_a, word_b, word_c = word_a.lower(), word_b.lower(), word_c.lower()\n \n ### START CODE HERE ###\n # Get the word embeddings e_a, e_b and e_c (≈1-3 lines)\n e_a, e_b, e_c = word_to_vec_map[word_a], word_to_vec_map[word_b], word_to_vec_map[word_c]\n ### END CODE HERE ###\n \n words = word_to_vec_map.keys()\n max_cosine_sim = -100 # Initialize max_cosine_sim to a large negative number\n best_word = None # Initialize best_word with None, it will help keep track of the word to output\n\n # to avoid best_word being one of the input words, skip the input words\n # place the input words in a set for faster searching than a list\n # We will re-use this set of input words inside the for-loop\n input_words_set = set([word_a, word_b, word_c])\n \n # loop over the whole word vector set\n for w in words: \n # to avoid best_word being one of the input words, skip the input words\n if w in input_words_set:\n continue\n \n ### START CODE HERE ###\n # Compute cosine similarity between the vector (e_b - e_a) and the vector ((w's vector representation) - e_c) (≈1 line)\n cosine_sim = cosine_similarity(e_b - e_a, word_to_vec_map[w] - e_c)\n \n # If the cosine_sim is more than the max_cosine_sim seen so far,\n # then: set the new max_cosine_sim to the current cosine_sim and the best_word to the current word (≈3 lines)\n if cosine_sim > max_cosine_sim:\n max_cosine_sim = cosine_sim\n best_word = w\n ### END CODE HERE ###\n \n return best_word", "_____no_output_____" ] ], [ [ "Run the cell below to test your code, this may take 1-2 minutes.", "_____no_output_____" ] ], [ [ "triads_to_try = [('italy', 'italian', 'spain'), ('india', 'delhi', 'japan'), ('man', 'woman', 'boy'), ('small', 'smaller', 'large')]\nfor triad in triads_to_try:\n print ('{} -> {} :: {} -> {}'.format( *triad, 
complete_analogy(*triad,word_to_vec_map)))", "italy -> italian :: spain -> spanish\nindia -> delhi :: japan -> tokyo\nman -> woman :: boy -> girl\nsmall -> smaller :: large -> larger\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **italy -> italian** ::\n </td>\n <td>\n spain -> spanish\n </td>\n </tr>\n <tr>\n <td>\n **india -> delhi** ::\n </td>\n <td>\n japan -> tokyo\n </td>\n </tr>\n <tr>\n <td>\n **man -> woman ** ::\n </td>\n <td>\n boy -> girl\n </td>\n </tr>\n <tr>\n <td>\n **small -> smaller ** ::\n </td>\n <td>\n large -> larger\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "* Once you get the correct expected output, please feel free to modify the input cells above to test your own analogies. \n* Try to find some other analogy pairs that do work, but also find some where the algorithm doesn't give the right answer:\n * For example, you can try small->smaller as big->?.", "_____no_output_____" ], [ "### Congratulations!\n\nYou've come to the end of the graded portion of the assignment. Here are the main points you should remember:\n\n- Cosine similarity is a good way to compare the similarity between pairs of word vectors.\n - Note that L2 (Euclidean) distance also works.\n- For NLP applications, using a pre-trained set of word vectors is often a good way to get started.\n- Even though you have finished the graded portions, we recommend you take a look at the rest of this notebook to learn about debiasing word vectors.\n\nCongratulations on finishing the graded portions of this notebook! \n", "_____no_output_____" ], [ "## 3 - Debiasing word vectors (OPTIONAL/UNGRADED) ", "_____no_output_____" ], [ "In the following exercise, you will examine gender biases that can be reflected in a word embedding, and explore algorithms for reducing the bias. In addition to learning about the topic of debiasing, this exercise will also help hone your intuition about what word vectors are doing. 
This section involves a bit of linear algebra, though you can probably complete it even without being an expert in linear algebra, and we encourage you to give it a shot. This portion of the notebook is optional and is not graded. \n\nLets first see how the GloVe word embeddings relate to gender. You will first compute a vector $g = e_{woman}-e_{man}$, where $e_{woman}$ represents the word vector corresponding to the word *woman*, and $e_{man}$ corresponds to the word vector corresponding to the word *man*. The resulting vector $g$ roughly encodes the concept of \"gender\". (You might get a more accurate representation if you compute $g_1 = e_{mother}-e_{father}$, $g_2 = e_{girl}-e_{boy}$, etc. and average over them. But just using $e_{woman}-e_{man}$ will give good enough results for now.) \n", "_____no_output_____" ] ], [ [ "g = word_to_vec_map['woman'] - word_to_vec_map['man']\nprint(g)", "[-0.087144 0.2182 -0.40986 -0.03922 -0.1032 0.94165\n -0.06042 0.32988 0.46144 -0.35962 0.31102 -0.86824\n 0.96006 0.01073 0.24337 0.08193 -1.02722 -0.21122\n 0.695044 -0.00222 0.29106 0.5053 -0.099454 0.40445\n 0.30181 0.1355 -0.0606 -0.07131 -0.19245 -0.06115\n -0.3204 0.07165 -0.13337 -0.25068714 -0.14293 -0.224957\n -0.149 0.048882 0.12191 -0.27362 -0.165476 -0.20426\n 0.54376 -0.271425 -0.10245 -0.32108 0.2516 -0.33455\n -0.04371 0.01258 ]\n" ] ], [ [ "Now, you will consider the cosine similarity of different words with $g$. Consider what a positive value of similarity means vs a negative cosine similarity. 
", "_____no_output_____" ] ], [ [ "print ('List of names and their similarities with constructed vector:')\n\n# girls and boys name\nname_list = ['john', 'marie', 'sophie', 'ronaldo', 'priya', 'rahul', 'danielle', 'reza', 'katy', 'yasmin']\n\nfor w in name_list:\n print (w, cosine_similarity(word_to_vec_map[w], g))", "List of names and their similarities with constructed vector:\njohn -0.23163356146\nmarie 0.315597935396\nsophie 0.318687898594\nronaldo -0.312447968503\npriya 0.17632041839\nrahul -0.169154710392\ndanielle 0.243932992163\nreza -0.079304296722\nkaty 0.283106865957\nyasmin 0.233138577679\n" ] ], [ [ "As you can see, female first names tend to have a positive cosine similarity with our constructed vector $g$, while male first names tend to have a negative cosine similarity. This is not surprising, and the result seems acceptable. \n\nBut let's try with some other words.", "_____no_output_____" ] ], [ [ "print('Other words and their similarities:')\nword_list = ['lipstick', 'guns', 'science', 'arts', 'literature', 'warrior','doctor', 'tree', 'receptionist', \n 'technology', 'fashion', 'teacher', 'engineer', 'pilot', 'computer', 'singer']\nfor w in word_list:\n print (w, cosine_similarity(word_to_vec_map[w], g))", "Other words and their similarities:\nlipstick 0.276919162564\nguns -0.18884855679\nscience -0.0608290654093\narts 0.00818931238588\nliterature 0.0647250443346\nwarrior -0.209201646411\ndoctor 0.118952894109\ntree -0.0708939917548\nreceptionist 0.330779417506\ntechnology -0.131937324476\nfashion 0.0356389462577\nteacher 0.179209234318\nengineer -0.0803928049452\npilot 0.00107644989919\ncomputer -0.103303588739\nsinger 0.185005181365\n" ] ], [ [ "Do you notice anything surprising? It is astonishing how these results reflect certain unhealthy gender stereotypes. For example, \"computer\" is closer to \"man\" while \"literature\" is closer to \"woman\". Ouch! 
\n\nWe'll see below how to reduce the bias of these vectors, using an algorithm due to [Boliukbasi et al., 2016](https://arxiv.org/abs/1607.06520). Note that some word pairs such as \"actor\"/\"actress\" or \"grandmother\"/\"grandfather\" should remain gender specific, while other words such as \"receptionist\" or \"technology\" should be neutralized, i.e. not be gender-related. You will have to treat these two types of words differently when debiasing.\n\n### 3.1 - Neutralize bias for non-gender specific words \n\nThe figure below should help you visualize what neutralizing does. If you're using a 50-dimensional word embedding, the 50 dimensional space can be split into two parts: The bias-direction $g$, and the remaining 49 dimensions, which we'll call $g_{\\perp}$. In linear algebra, we say that the 49 dimensional $g_{\\perp}$ is perpendicular (or \"orthogonal\") to $g$, meaning it is at 90 degrees to $g$. The neutralization step takes a vector such as $e_{receptionist}$ and zeros out the component in the direction of $g$, giving us $e_{receptionist}^{debiased}$. \n\nEven though $g_{\\perp}$ is 49 dimensional, given the limitations of what we can draw on a 2D screen, we illustrate it using a 1 dimensional axis below. \n\n<img src=\"images/neutral.png\" style=\"width:800px;height:300px;\">\n<caption><center> **Figure 2**: The word vector for \"receptionist\" represented before and after applying the neutralize operation. </center></caption>\n\n**Exercise**: Implement `neutralize()` to remove the bias of words such as \"receptionist\" or \"scientist\". Given an input embedding $e$, you can use the following formulas to compute $e^{debiased}$: \n\n$$e^{bias\\_component} = \\frac{e \\cdot g}{||g||_2^2} * g\\tag{2}$$\n$$e^{debiased} = e - e^{bias\\_component}\\tag{3}$$\n\nIf you are an expert in linear algebra, you may recognize $e^{bias\\_component}$ as the projection of $e$ onto the direction $g$. 
If you're not an expert in linear algebra, don't worry about this.\n\n<!-- \n**Reminder**: a vector $u$ can be split into two parts: its projection over a vector-axis $v_B$ and its projection over the axis orthogonal to $v$:\n$$u = u_B + u_{\\perp}$$\nwhere : $u_B = $ and $ u_{\\perp} = u - u_B $\n!--> ", "_____no_output_____" ] ], [ [ "def neutralize(word, g, word_to_vec_map):\n \"\"\"\n Removes the bias of \"word\" by projecting it on the space orthogonal to the bias axis. \n This function ensures that gender neutral words are zero in the gender subspace.\n \n Arguments:\n word -- string indicating the word to debias\n g -- numpy-array of shape (50,), corresponding to the bias axis (such as gender)\n word_to_vec_map -- dictionary mapping words to their corresponding vectors.\n \n Returns:\n e_debiased -- neutralized word vector representation of the input \"word\"\n \"\"\"\n \n ### START CODE HERE ###\n # Select word vector representation of \"word\". Use word_to_vec_map. (≈ 1 line)\n e = word_to_vec_map[word]\n \n # Compute e_biascomponent using the formula give above. (≈ 1 line)\n e_biascomponent = (np.dot(e,g)/np.linalg.norm(g)**2)*g\n \n # Neutralize e by substracting e_biascomponent from it \n # e_debiased should be equal to its orthogonal projection. 
(≈ 1 line)\n e_debiased = e-e_biascomponent\n ### END CODE HERE ###\n \n return e_debiased", "_____no_output_____" ], [ "e = \"receptionist\"\nprint(\"cosine similarity between \" + e + \" and g, before neutralizing: \", cosine_similarity(word_to_vec_map[\"receptionist\"], g))\n\ne_debiased = neutralize(\"receptionist\", g, word_to_vec_map)\nprint(\"cosine similarity between \" + e + \" and g, after neutralizing: \", cosine_similarity(e_debiased, g))", "cosine similarity between receptionist and g, before neutralizing: 0.330779417506\ncosine similarity between receptionist and g, after neutralizing: -5.84103233224e-18\n" ] ], [ [ "**Expected Output**: The second result is essentially 0, up to numerical rounding (on the order of $10^{-17}$).\n\n\n<table>\n <tr>\n <td>\n **cosine similarity between receptionist and g, before neutralizing:** :\n </td>\n <td>\n 0.330779417506\n </td>\n </tr>\n <tr>\n <td>\n **cosine similarity between receptionist and g, after neutralizing:** :\n </td>\n <td>\n -3.26732746085e-17\n </tr>\n</table>", "_____no_output_____" ], [ "### 3.2 - Equalization algorithm for gender-specific words\n\nNext, lets see how debiasing can also be applied to word pairs such as \"actress\" and \"actor.\" Equalization is applied to pairs of words that you might want to have differ only through the gender property. As a concrete example, suppose that \"actress\" is closer to \"babysit\" than \"actor.\" By applying neutralizing to \"babysit\" we can reduce the gender-stereotype associated with babysitting. But this still does not guarantee that \"actor\" and \"actress\" are equidistant from \"babysit.\" The equalization algorithm takes care of this. \n\nThe key idea behind equalization is to make sure that a particular pair of words are equi-distant from the 49-dimensional $g_\\perp$. The equalization step also ensures that the two equalized steps are now the same distance from $e_{receptionist}^{debiased}$, or from any other work that has been neutralized. 
In pictures, this is how equalization works: \n\n<img src=\"images/equalize10.png\" style=\"width:800px;height:400px;\">\n\n\nThe derivation of the linear algebra to do this is a bit more complex. (See Bolukbasi et al., 2016 for details.) But the key equations are: \n\n$$ \\mu = \\frac{e_{w1} + e_{w2}}{2}\\tag{4}$$ \n\n$$ \\mu_{B} = \\frac {\\mu \\cdot \\text{bias_axis}}{||\\text{bias_axis}||_2^2} *\\text{bias_axis}\n\\tag{5}$$ \n\n$$\\mu_{\\perp} = \\mu - \\mu_{B} \\tag{6}$$\n\n$$ e_{w1B} = \\frac {e_{w1} \\cdot \\text{bias_axis}}{||\\text{bias_axis}||_2^2} *\\text{bias_axis}\n\\tag{7}$$ \n$$ e_{w2B} = \\frac {e_{w2} \\cdot \\text{bias_axis}}{||\\text{bias_axis}||_2^2} *\\text{bias_axis}\n\\tag{8}$$\n\n\n$$e_{w1B}^{corrected} = \\sqrt{ |{1 - ||\\mu_{\\perp} ||^2_2} |} * \\frac{e_{\\text{w1B}} - \\mu_B} {||(e_{w1} - \\mu_{\\perp}) - \\mu_B||} \\tag{9}$$\n\n\n$$e_{w2B}^{corrected} = \\sqrt{ |{1 - ||\\mu_{\\perp} ||^2_2} |} * \\frac{e_{\\text{w2B}} - \\mu_B} {||(e_{w2} - \\mu_{\\perp}) - \\mu_B||} \\tag{10}$$\n\n$$e_1 = e_{w1B}^{corrected} + \\mu_{\\perp} \\tag{11}$$\n$$e_2 = e_{w2B}^{corrected} + \\mu_{\\perp} \\tag{12}$$\n\n\n**Exercise**: Implement the function below. Use the equations above to get the final equalized version of the pair of words. Good luck!", "_____no_output_____" ] ], [ [ "def equalize(pair, bias_axis, word_to_vec_map):\n \"\"\"\n Debias gender specific words by following the equalize method described in the figure above.\n \n Arguments:\n pair -- pair of strings of gender specific words to debias, e.g. (\"actress\", \"actor\") \n bias_axis -- numpy-array of shape (50,), vector corresponding to the bias axis, e.g. gender\n word_to_vec_map -- dictionary mapping words to their corresponding vectors\n \n Returns\n e_1 -- word vector corresponding to the first word\n e_2 -- word vector corresponding to the second word\n \"\"\"\n \n ### START CODE HERE ###\n # Step 1: Select word vector representation of \"word\". Use word_to_vec_map. 
(≈ 2 lines)\n w1, w2 = pair[0],pair[1]\n e_w1, e_w2 = word_to_vec_map[w1],word_to_vec_map[w2]\n \n # Step 2: Compute the mean of e_w1 and e_w2 (≈ 1 line)\n mu = (e_w1 + e_w2)/2\n\n # Step 3: Compute the projections of mu over the bias axis and the orthogonal axis (≈ 2 lines)\n mu_B = (np.dot(mu,bias_axis)/np.linalg.norm(bias_axis)**2)*bias_axis\n mu_orth = mu-mu_B\n\n # Step 4: Use equations (7) and (8) to compute e_w1B and e_w2B (≈2 lines)\n e_w1B = (np.dot(e_w1,bias_axis)/np.linalg.norm(bias_axis)**2)*bias_axis\n e_w2B = (np.dot(e_w2,bias_axis)/np.linalg.norm(bias_axis)**2)*bias_axis\n \n # Step 5: Adjust the Bias part of e_w1B and e_w2B using the formulas (9) and (10) given above (≈2 lines)\n corrected_e_w1B = np.sqrt(np.abs(1-np.linalg.norm(mu_orth)**2))*((e_w1B - mu_B)/np.abs((e_w1-mu_orth)-mu_B))\n corrected_e_w2B = np.sqrt(np.abs(1-np.linalg.norm(mu_orth)**2))*((e_w2B - mu_B)/np.abs((e_w2-mu_orth)-mu_B))\n\n # Step 6: Debias by equalizing e1 and e2 to the sum of their corrected projections (≈2 lines)\n e1 = corrected_e_w1B + mu_orth\n e2 = corrected_e_w2B + mu_orth\n \n ### END CODE HERE ###\n \n return e1, e2", "_____no_output_____" ], [ "print(\"cosine similarities before equalizing:\")\nprint(\"cosine_similarity(word_to_vec_map[\\\"man\\\"], gender) = \", cosine_similarity(word_to_vec_map[\"man\"], g))\nprint(\"cosine_similarity(word_to_vec_map[\\\"woman\\\"], gender) = \", cosine_similarity(word_to_vec_map[\"woman\"], g))\nprint()\ne1, e2 = equalize((\"man\", \"woman\"), g, word_to_vec_map)\nprint(\"cosine similarities after equalizing:\")\nprint(\"cosine_similarity(e1, gender) = \", cosine_similarity(e1, g))\nprint(\"cosine_similarity(e2, gender) = \", cosine_similarity(e2, g))", "cosine similarities before equalizing:\ncosine_similarity(word_to_vec_map[\"man\"], gender) = -0.117110957653\ncosine_similarity(word_to_vec_map[\"woman\"], gender) = 0.356666188463\n\ncosine similarities after equalizing:\ncosine_similarity(e1, gender) = 
-0.716572752584\ncosine_similarity(e2, gender) = 0.739659647493\n" ] ], [ [ "**Expected Output**:\n\ncosine similarities before equalizing:\n<table>\n <tr>\n <td>\n **cosine_similarity(word_to_vec_map[\"man\"], gender)** =\n </td>\n <td>\n -0.117110957653\n </td>\n </tr>\n <tr>\n <td>\n **cosine_similarity(word_to_vec_map[\"woman\"], gender)** =\n </td>\n <td>\n 0.356666188463\n </td>\n </tr>\n</table>\n\ncosine similarities after equalizing:\n<table>\n <tr>\n <td>\n **cosine_similarity(u1, gender)** =\n </td>\n <td>\n -0.700436428931\n </td>\n </tr>\n <tr>\n <td>\n **cosine_similarity(u2, gender)** =\n </td>\n <td>\n 0.700436428931\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "Please feel free to play with the input words in the cell above, to apply equalization to other pairs of words. \n\nThese debiasing algorithms are very helpful for reducing bias, but are not perfect and do not eliminate all traces of bias. For example, one weakness of this implementation was that the bias direction $g$ was defined using only the pair of words _woman_ and _man_. As discussed earlier, if $g$ were defined by computing $g_1 = e_{woman} - e_{man}$; $g_2 = e_{mother} - e_{father}$; $g_3 = e_{girl} - e_{boy}$; and so on and averaging over them, you would obtain a better estimate of the \"gender\" dimension in the 50 dimensional word embedding space. Feel free to play with such variants as well. \n ", "_____no_output_____" ], [ "### Congratulations\n\nYou have come to the end of this notebook, and have seen a lot of the ways that word vectors can be used as well as modified. \n\nCongratulations on finishing this notebook! \n", "_____no_output_____" ], [ "**References**:\n- The debiasing algorithm is from Bolukbasi et al., 2016, [Man is to Computer Programmer as Woman is to\nHomemaker? 
Debiasing Word Embeddings](https://papers.nips.cc/paper/6228-man-is-to-computer-programmer-as-woman-is-to-homemaker-debiasing-word-embeddings.pdf)\n- The GloVe word embeddings were due to Jeffrey Pennington, Richard Socher, and Christopher D. Manning. (https://nlp.stanford.edu/projects/glove/)\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ] ]
4aa2805ea4e27295094b9d8a9775992322b4cb74
333,912
ipynb
Jupyter Notebook
Projects/Exploratory/UsedCars.ipynb
miku/haw-di-bim-lv22
5e3dd1f7a1eb02ebbe5cc801bd8094618d6525e3
[ "MIT" ]
null
null
null
Projects/Exploratory/UsedCars.ipynb
miku/haw-di-bim-lv22
5e3dd1f7a1eb02ebbe5cc801bd8094618d6525e3
[ "MIT" ]
null
null
null
Projects/Exploratory/UsedCars.ipynb
miku/haw-di-bim-lv22
5e3dd1f7a1eb02ebbe5cc801bd8094618d6525e3
[ "MIT" ]
null
null
null
96.701998
55,556
0.78471
[ [ [ "## 15 Used cars dataset\n\nA short tour through some used car data\n\n* https://data.world/data-society/used-cars-data\n\nThis is a real-world data set with couple of flaws: wrong or missing data, outliers.\n\nIn the process we:\n\n* inspect the data\n* dig into the strange (ugly) parts and clean bad rows\n* run a few aggregations and visualisation", "_____no_output_____" ] ], [ [ "import matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.pyplot as plt\n%matplotlib inline", "_____no_output_____" ], [ "import pandas as pd", "_____no_output_____" ], [ "df = pd.read_csv(\"data/autos.csv\", encoding=\"latin-1\")", "_____no_output_____" ], [ "df.shape", "_____no_output_____" ] ], [ [ "## Inspection\n\n* unique\n* groupby\n* size\n* sort_values", "_____no_output_____" ], [ "Unique Brands", "_____no_output_____" ] ], [ [ "df.brand.unique()", "_____no_output_____" ], [ "df.brand.unique().size", "_____no_output_____" ] ], [ [ "Most popular brands", "_____no_output_____" ] ], [ [ "df.groupby('brand').size().sort_values(ascending=False)", "_____no_output_____" ], [ "df.groupby('brand').size().sort_values().plot(kind='barh', figsize=(8, 6), grid=True)", "_____no_output_____" ], [ "df.head()", "_____no_output_____" ] ], [ [ "## Null checks?\n\n* isnull\n* any\n* head\n* sum", "_____no_output_____" ] ], [ [ "df.isnull().values.any()", "_____no_output_____" ] ], [ [ "Ok, but where?", "_____no_output_____" ] ], [ [ "df[df.isnull().any(axis=1)].head() # 1/3 of rows contain 0 values", "_____no_output_____" ], [ "df[df.isnull().any(axis=1)].shape # about 1/3 of rows contain 0 values", "_____no_output_____" ] ], [ [ "Which columns?", "_____no_output_____" ] ], [ [ "void = df[pd.isnull(df).any(axis=1)].loc[:, df.isna().any()]", "_____no_output_____" ], [ "void.shape", "_____no_output_____" ] ], [ [ "We created a new data frame, see checks.", "_____no_output_____" ] ], [ [ "void.values.base is df.values", "_____no_output_____" ], [ "void._is_view", "_____no_output_____" ] ], [ 
[ "Counting null values.", "_____no_output_____" ] ], [ [ "void.isnull()", "_____no_output_____" ], [ "void.isnull().sum()", "_____no_output_____" ] ], [ [ "Question: Is there a group brands, that do not have a vehicle type?\n\n", "_____no_output_____" ] ], [ [ "s = df[df.vehicleType.isnull()].groupby('brand').size().sort_values(ascending=False)", "_____no_output_____" ], [ "s.head()", "_____no_output_____" ] ], [ [ "What is the ratio of undefined types for each brand in this dataset?", "_____no_output_____" ] ], [ [ "(s / df.groupby('brand').size()).describe()", "_____no_output_____" ], [ "(s / df.groupby('brand').size()).sort_values(ascending=False).head(10)", "_____no_output_____" ] ], [ [ "Oh right, the GDR built cars, too.", "_____no_output_____" ], [ "Ok, look at the prices.", "_____no_output_____" ] ], [ [ "df.price.describe()", "_____no_output_____" ], [ "# Suppress scientific notation.", "_____no_output_____" ], [ "pd.set_option('display.float_format', lambda x: '%.3f' % x)", "_____no_output_____" ], [ "df.price.describe()", "_____no_output_____" ] ], [ [ "Simple outlier detection.", "_____no_output_____" ] ], [ [ "len(df[df.price > 3 * df.price.mean()])", "_____no_output_____" ], [ "df[df.price > 3 * df.price.mean()].shape", "_____no_output_____" ], [ "df[df.price > 3 * df.price.mean()].describe()", "_____no_output_____" ] ], [ [ "Question: Which used car brands cost 50000 to 60000?", "_____no_output_____" ] ], [ [ "df[(df.price > 50000) & (df.price < 60000)].groupby('brand').size().sort_values(ascending=False)", "_____no_output_____" ] ], [ [ "And which ones cost over 100k?", "_____no_output_____" ] ], [ [ "df[(df.price > 100000)].groupby('brand').size().sort_values(ascending=False)", "_____no_output_____" ] ], [ [ "Getting closer to the outlier?", "_____no_output_____" ] ], [ [ "df[(df.price > 500000)].groupby('brand').size().sort_values(ascending=False)", "_____no_output_____" ] ], [ [ "Let's look at these gems.", "_____no_output_____" ] ], [ [ 
"df[(df.price > 400000)][[\"name\", \"seller\", \"price\", \"yearOfRegistration\"]]", "_____no_output_____" ] ], [ [ "Let's ignore all rows, where price is silly. Int and string match?", "_____no_output_____" ] ], [ [ "df[(df.price > 400000)].price.dtype", "_____no_output_____" ], [ "df[(df.price > 400000)].price.astype(str).dtype", "_____no_output_____" ] ], [ [ "Exclude some, but maybe all.", "_____no_output_____" ] ], [ [ "df[(df.price > 400000)].price.astype(str).str.match(\"999|111|12345|911911\").sum()", "_____no_output_____" ] ], [ [ "* https://stackoverflow.com/questions/13851535/how-to-delete-rows-from-a-pandas-dataframe-based-on-a-conditional-expression\n\n```\ndf = df.drop(df[df.score < 50].index)\n```", "_____no_output_____" ], [ "Let's drop these things above 500000.", "_____no_output_____" ] ], [ [ "df = df.drop(df[df.price > 500000].index)", "_____no_output_____" ], [ "df = df.drop(df[df.price < 1].index)", "_____no_output_____" ], [ "df.shape", "_____no_output_____" ], [ "df.price.dtype", "_____no_output_____" ], [ "df = df.drop(df[df.yearOfRegistration > 2018].index)", "_____no_output_____" ], [ "df = df.drop(df[df.yearOfRegistration < 1886].index)", "_____no_output_____" ], [ "df.shape", "_____no_output_____" ], [ "df.plot(kind=\"scatter\", x=\"yearOfRegistration\", y=\"price\")", "_____no_output_____" ], [ "df.plot(kind=\"scatter\", x=\"yearOfRegistration\", y=\"price\", alpha=0.5)", "_____no_output_____" ], [ "df.groupby('brand').price.mean().sort_values(ascending=True).plot(kind='barh', figsize=(10, 8), grid=True)", "_____no_output_____" ] ], [ [ "What else?", "_____no_output_____" ] ], [ [ "df.columns", "_____no_output_____" ] ], [ [ "Did PS increase over the years?", "_____no_output_____" ] ], [ [ "df.plot(kind=\"scatter\", x=\"yearOfRegistration\", y=\"powerPS\")", "_____no_output_____" ] ], [ [ "Ok, let's drop some more. 
7500 PS?", "_____no_output_____" ] ], [ [ "df[df.powerPS > 500].head()", "_____no_output_____" ], [ "df[df.powerPS > 500].shape", "_____no_output_____" ], [ "df = df.drop(df[df.powerPS > 500].index)", "_____no_output_____" ], [ "df.shape", "_____no_output_____" ], [ "df.plot(kind=\"scatter\", x=\"yearOfRegistration\", y=\"powerPS\", alpha=0.4)", "_____no_output_____" ] ], [ [ "There seems to be some spike in the 1970? Some breakthrough?", "_____no_output_____" ], [ "What else?", "_____no_output_____" ] ], [ [ "df.columns", "_____no_output_____" ] ], [ [ "Price categories for vehicle types?", "_____no_output_____" ] ], [ [ "df.groupby('vehicleType').price.mean().sort_values().plot(kind='barh')", "_____no_output_____" ], [ "df.groupby('vehicleType').price.median().sort_values().plot(kind='barh')", "_____no_output_____" ] ], [ [ "What else?", "_____no_output_____" ] ], [ [ "df.columns", "_____no_output_____" ] ], [ [ "Last question: Fuel type.", "_____no_output_____" ] ], [ [ "df.fuelType.unique()", "_____no_output_____" ], [ "df.groupby('fuelType').price.mean().sort_values().plot(kind='bar')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa28141b8ee19d04c9efe9123661f979e7725d5
7,407
ipynb
Jupyter Notebook
pytorch1.x/modules/linear/01_LogisticRegression_2.ipynb
kingreatwill/penter
2d027fd2ae639ac45149659a410042fe76b9dab0
[ "MIT" ]
13
2020-01-04T07:37:38.000Z
2021-08-31T05:19:58.000Z
pytorch1.x/modules/linear/01_LogisticRegression_2.ipynb
kingreatwill/penter
2d027fd2ae639ac45149659a410042fe76b9dab0
[ "MIT" ]
3
2020-06-05T22:42:53.000Z
2020-08-24T07:18:54.000Z
pytorch1.x/modules/linear/01_LogisticRegression_2.ipynb
kingreatwill/penter
2d027fd2ae639ac45149659a410042fe76b9dab0
[ "MIT" ]
9
2020-10-19T04:53:06.000Z
2021-08-31T05:20:01.000Z
33.215247
116
0.414878
[ [ [ "import torch as t\nimport torchvision as tv\nimport numpy as np\nimport time", "_____no_output_____" ] ], [ [ "# 不是逻辑回归", "_____no_output_____" ] ], [ [ "# 超参数\nEPOCH = 5\nBATCH_SIZE = 100\nDOWNLOAD_MNIST = True # 下过数据的话, 就可以设置成 False\nN_TEST_IMG = 10 # 到时候显示 5张图片看效果, 如上图一\n\n\n\nclass DNN(t.nn.Module):\n def __init__(self):\n super(DNN, self).__init__()\n\n train_data = tv.datasets.FashionMNIST(\n root=\"./fashionmnist/\",\n train=True,\n transform=tv.transforms.ToTensor(),\n download=DOWNLOAD_MNIST\n )\n\n test_data = tv.datasets.FashionMNIST(\n root=\"./fashionmnist/\",\n train=False,\n transform=tv.transforms.ToTensor(),\n download=DOWNLOAD_MNIST\n )\n\n print(test_data)\n\n\n # Data Loader for easy mini-batch return in training, the image batch shape will be (50, 1, 28, 28)\n self.train_loader = t.utils.data.DataLoader(\n dataset=train_data,\n batch_size=BATCH_SIZE,\n shuffle=True)\n\n self.test_loader = t.utils.data.DataLoader(\n dataset=test_data,\n batch_size=1000,\n shuffle=True)\n\n self.cnn = t.nn.Sequential(\n t.nn.Conv2d(\n in_channels=1, # input height\n out_channels=32, # n_filters\n kernel_size=5, # filter size\n stride=1, # filter movement/step\n padding=2, # 如果想要 con2d 出来的图片长宽没有变化, padding=(kernel_size-1)/2 当 stride=1\n ), # output shape (16, 28, 28)\n t.nn.ELU(), # activation\n t.nn.MaxPool2d(kernel_size=2),\n\n t.nn.Conv2d(\n in_channels=32, # input height\n out_channels=64, # n_filters\n kernel_size=3, # filter size\n stride=1, # filter movement/step\n padding=1, # 如果想要 con2d 出来的图片长宽没有变化, padding=(kernel_size-1)/2 当 stride=1\n ), # output shape (64, 14, 14)\n t.nn.ELU(), # activation\n t.nn.MaxPool2d(kernel_size=2) # output shape (64, 7, 7)\n )\n\n self.dnn = t.nn.Sequential(\n t.nn.Linear(7*7*64,256),\n t.nn.Dropout(0.5),\n t.nn.ELU(),\n t.nn.Linear(256,10),\n )\n\n self.lr = 0.001\n self.loss = t.nn.CrossEntropyLoss()\n self.opt = t.optim.Adam(self.parameters(), lr = self.lr)\n\n def forward(self,x):\n cnn1 = self.cnn(x)\n 
#print(cnn1.shape)\n cnn1 = cnn1.view(-1,7*7*64)\n #print(cnn1.shape)\n out = self.dnn(cnn1)\n #print(out.shape)\n return(out)\n\ndef train():\n use_gpu = t.cuda.is_available()\n model = DNN()\n if(use_gpu):\n model.cuda()\n print(model)\n loss = model.loss\n opt = model.opt\n dataloader = model.train_loader\n testloader = model.test_loader\n\n\n for e in range(EPOCH):\n step = 0\n ts = time.time()\n for (x, y) in (dataloader):\n\n\n model.train()# train model dropout used\n step += 1\n b_x = x.view(-1,1,28,28) # batch x, shape (batch, 28*28)\n #print(b_x.shape)\n b_y = y\n if(use_gpu):\n b_x = b_x.cuda()\n b_y = b_y.cuda()\n out = model(b_x)\n losses = loss(out,b_y)\n opt.zero_grad()\n losses.backward()\n opt.step()\n if(step%100 == 0):\n if(use_gpu):\n print(e,step,losses.data.cpu().numpy())\n else:\n print(e,step,losses.data.numpy())\n\n model.eval() # train model dropout not use\n for (tx,ty) in testloader:\n t_x = tx.view(-1,1, 28,28) # batch x, shape (batch, 28*28)\n t_y = ty\n if(use_gpu):\n t_x = t_x.cuda()\n t_y = t_y.cuda()\n t_out = model(t_x)\n if(use_gpu):\n acc = (np.argmax(t_out.data.cpu().numpy(),axis=1) == t_y.data.cpu().numpy())\n else:\n acc = (np.argmax(t_out.data.numpy(),axis=1) == t_y.data.numpy())\n\n print(time.time() - ts ,np.sum(acc)/1000)\n ts = time.time()\n break#只测试前1000个\n\n\n\n t.save(model, './model.pkl') # 保存整个网络\n t.save(model.state_dict(), './model_params.pkl') # 只保存网络中的参数 (速度快, 占内存少)\n #加载参数的方式\n \"\"\"net = DNN()\n net.load_state_dict(t.load('./model_params.pkl'))\n net.eval()\"\"\"\n #加载整个模型的方式\n net = t.load('./model.pkl')\n net.cpu()\n net.eval()\n for (tx,ty) in testloader:\n t_x = tx.view(-1, 1,28,28) # batch x, shape (batch, 28*28)\n t_y = ty\n\n t_out = net(t_x)\n #acc = (np.argmax(t_out.data.CPU().numpy(),axis=1) == t_y.data.CPU().numpy())\n acc = (np.argmax(t_out.data.numpy(),axis=1) == t_y.data.numpy())\n\n print(np.sum(acc)/1000)\n\ntrain()", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code" ] ]
4aa288f26e4977c8e7a8fcdbabb407fbf8f4ade0
400,938
ipynb
Jupyter Notebook
communities/Exporting Data from CAS using Python.ipynb
c964309085/sas-viya-programming
0332ddc3670bfe9043d1fc9d41e346f5eae59994
[ "Apache-2.0" ]
128
2016-07-09T16:44:31.000Z
2022-03-31T22:01:35.000Z
communities/Exporting Data from CAS using Python.ipynb
KonuTech/sas-viya-programming
be306061d0567741dd7da7084c959d5b1c3b3716
[ "Apache-2.0" ]
14
2017-01-06T14:02:17.000Z
2022-02-10T02:28:41.000Z
communities/Exporting Data from CAS using Python.ipynb
KonuTech/sas-viya-programming
be306061d0567741dd7da7084c959d5b1c3b3716
[ "Apache-2.0" ]
141
2016-07-08T19:52:16.000Z
2022-03-01T03:30:33.000Z
44.912961
492
0.342986
[ [ [ "# Exporting Data from CAS using Python\n\nWhile the **save** action can export data to many formats and data sources, there are also ways of easily converting CAS table data to formats on the client as well. Keep in mind though that while you can export large data sets on the server, you may not want to attempt to bring tens of gigabytes of data down to the client using these methods.\n\nWhile you can always use the **fetch** action to get the data from a CAS table, you might just want to export the data to a file. To make this easier, the CASTable objects support the same **to_XXX** methods as Pandas DataFrames. This includes **to_csv**, **to_dict**, **to_excel**, **to_html**, and others. Behind the scenes, the **fetch** action is called and the resulting DataFrame is exported to the file corresponding to the export method used. Let's look at some examples.\n\nFirst we need a connection to the server.", "_____no_output_____" ] ], [ [ "import swat\n\nconn = swat.CAS(host, port, username, password)", "_____no_output_____" ] ], [ [ "For purposes of this example, we will load some data into the server to work with. You may already have tables in your server that you can use.", "_____no_output_____" ] ], [ [ "tbl = conn.read_csv('https://raw.githubusercontent.com/sassoftware/sas-viya-programming/master/data/cars.csv')\ntbl", "_____no_output_____" ], [ "tbl.head()", "_____no_output_____" ] ], [ [ "Now that we have a CASTable object to work with, we can export the data from the CAS table that it references to a local file. We'll start with CSV. The **to_csv** method will return a string of CSV data if you don't specify a filename. 
We'll do it that way in the following code.", "_____no_output_____" ] ], [ [ "print(tbl.to_csv())", ",Make,Model,Type,Origin,DriveTrain,MSRP,Invoice,EngineSize,Cylinders,Horsepower,MPG_City,MPG_Highway,Weight,Wheelbase,Length\n0,Acura,MDX,SUV,Asia,All,36945.0,33337.0,3.5,6.0,265.0,17.0,23.0,4451.0,106.0,189.0\n1,Acura,3.5 RL 4dr,Sedan,Asia,Front,43755.0,39014.0,3.5,6.0,225.0,18.0,24.0,3880.0,115.0,197.0\n2,Audi,A41.8T convertible 2dr,Sedan,Europe,Front,35940.0,32506.0,1.8,4.0,170.0,23.0,30.0,3638.0,105.0,180.0\n3,Audi,A6 3.0 4dr,Sedan,Europe,Front,36640.0,33129.0,3.0,6.0,220.0,20.0,27.0,3561.0,109.0,192.0\n4,Audi,A6 2.7 Turbo Quattro 4dr,Sedan,Europe,All,42840.0,38840.0,2.7,6.0,250.0,18.0,25.0,3836.0,109.0,192.0\n5,Audi,RS 6 4dr,Sports,Europe,Front,84600.0,76417.0,4.2,8.0,450.0,15.0,22.0,4024.0,109.0,191.0\n6,Audi,A6 3.0 Avant Quattro,Wagon,Europe,All,40840.0,37060.0,3.0,6.0,220.0,18.0,25.0,4035.0,109.0,192.0\n7,BMW,325i 4dr,Sedan,Europe,Rear,28495.0,26155.0,2.5,6.0,184.0,20.0,29.0,3219.0,107.0,176.0\n8,BMW,330i 4dr,Sedan,Europe,Rear,35495.0,32525.0,3.0,6.0,225.0,20.0,30.0,3285.0,107.0,176.0\n9,BMW,330Ci convertible 2dr,Sedan,Europe,Rear,44295.0,40530.0,3.0,6.0,225.0,19.0,28.0,3616.0,107.0,177.0\n10,BMW,745Li 4dr,Sedan,Europe,Rear,73195.0,66830.0,4.4,8.0,325.0,18.0,26.0,4464.0,123.0,204.0\n11,BMW,Z4 convertible 3.0i 2dr,Sports,Europe,Rear,41045.0,37575.0,3.0,6.0,225.0,21.0,29.0,2998.0,98.0,161.0\n12,Buick,Century Custom 4dr,Sedan,USA,Front,22180.0,20351.0,3.1,6.0,175.0,20.0,30.0,3353.0,109.0,195.0\n13,Buick,LeSabre Limited 4dr,Sedan,USA,Front,32245.0,29566.0,3.8,6.0,205.0,20.0,29.0,3591.0,112.0,200.0\n14,Cadillac,SRX V8,SUV,USA,Front,46995.0,43523.0,4.6,8.0,320.0,16.0,21.0,4302.0,116.0,195.0\n15,Cadillac,Seville SLS 4dr,Sedan,USA,Front,47955.0,43841.0,4.6,8.0,275.0,18.0,26.0,3992.0,112.0,201.0\n16,Chevrolet,Tahoe LT,SUV,USA,All,41465.0,36287.0,5.3,8.0,295.0,14.0,18.0,5050.0,116.0,197.0\n17,Chevrolet,Aveo LS 4dr 
hatch,Sedan,USA,Front,12585.0,11802.0,1.6,4.0,103.0,28.0,34.0,2348.0,98.0,153.0\n18,Chevrolet,Impala 4dr,Sedan,USA,Front,21900.0,20095.0,3.4,6.0,180.0,21.0,32.0,3465.0,111.0,200.0\n19,Chevrolet,Impala LS 4dr,Sedan,USA,Front,25000.0,22931.0,3.8,6.0,200.0,20.0,30.0,3476.0,111.0,200.0\n20,Chevrolet,Astro,Sedan,USA,All,26395.0,23954.0,4.3,6.0,190.0,14.0,17.0,4605.0,111.0,190.0\n21,Chevrolet,Avalanche 1500,Truck,USA,All,36100.0,31689.0,5.3,8.0,295.0,14.0,18.0,5678.0,130.0,222.0\n22,Chevrolet,SSR,Truck,USA,Rear,41995.0,39306.0,5.3,8.0,300.0,16.0,19.0,4760.0,116.0,191.0\n23,Chrysler,Sebring 4dr,Sedan,USA,Front,19090.0,17805.0,2.4,4.0,150.0,22.0,30.0,3173.0,108.0,191.0\n24,Chrysler,Concorde LXi 4dr,Sedan,USA,Front,26860.0,24909.0,3.5,6.0,232.0,19.0,27.0,3548.0,113.0,208.0\n25,Chrysler,Sebring Limited convertible 2dr,Sedan,USA,Front,30950.0,28613.0,2.7,6.0,200.0,21.0,28.0,3448.0,106.0,194.0\n26,Chrysler,Pacifica,Wagon,USA,Rear,31230.0,28725.0,3.5,6.0,250.0,17.0,23.0,4675.0,116.0,199.0\n27,Dodge,Intrepid SE 4dr,Sedan,USA,Front,22035.0,20502.0,2.7,6.0,200.0,21.0,29.0,3469.0,113.0,204.0\n28,Dodge,Caravan SE,Sedan,USA,Front,21795.0,20508.0,2.4,4.0,150.0,20.0,26.0,3862.0,113.0,189.0\n29,Dodge,Dakota Club Cab,Truck,USA,Rear,20300.0,18670.0,3.7,6.0,210.0,16.0,22.0,3829.0,131.0,219.0\n30,Ford,Explorer XLT V6,SUV,USA,All,29670.0,26983.0,4.0,6.0,210.0,15.0,20.0,4463.0,114.0,190.0\n31,Ford,Focus SE 4dr,Sedan,USA,Front,15460.0,14496.0,2.0,4.0,130.0,26.0,33.0,2606.0,103.0,168.0\n32,Ford,Taurus SES Duratec 4dr,Sedan,USA,Front,22735.0,20857.0,3.0,6.0,201.0,19.0,26.0,3313.0,109.0,198.0\n33,Ford,Freestar SE,Sedan,USA,Front,26930.0,24498.0,3.9,6.0,193.0,17.0,23.0,4275.0,121.0,201.0\n34,Ford,F-150 Regular Cab XL,Truck,USA,Rear,22010.0,19490.0,4.6,8.0,231.0,15.0,19.0,4788.0,126.0,211.0\n35,Ford,Taurus SE,Wagon,USA,Front,22290.0,20457.0,3.0,6.0,155.0,19.0,26.0,3497.0,109.0,198.0\n36,GMC,Safari SLE,Sedan,USA,Rear,25640.0,23215.0,4.3,6.0,190.0,16.0,20.0,4309.0,111.0,190.0\n37,GMC,Sonoma Crew 
Cab,Truck,USA,All,25395.0,23043.0,4.3,6.0,190.0,15.0,19.0,4083.0,123.0,208.0\n38,Honda,CR-V LX,SUV,Asia,All,19860.0,18419.0,2.4,4.0,160.0,21.0,25.0,3258.0,103.0,179.0\n39,Honda,Civic LX 4dr,Sedan,Asia,Front,15850.0,14531.0,1.7,4.0,115.0,32.0,38.0,2513.0,103.0,175.0\n40,Honda,Civic Si 2dr hatch,Sedan,Asia,Front,19490.0,17849.0,2.0,4.0,160.0,26.0,30.0,2782.0,101.0,166.0\n41,Honda,Odyssey EX,Sedan,Asia,Front,27450.0,24744.0,3.5,6.0,240.0,18.0,25.0,4365.0,118.0,201.0\n42,Hyundai,Accent 2dr hatch,Sedan,Asia,Front,10539.0,10107.0,1.6,4.0,103.0,29.0,33.0,2255.0,96.0,167.0\n43,Hyundai,Elantra GT 4dr,Sedan,Asia,Front,15389.0,14207.0,2.0,4.0,138.0,26.0,34.0,2635.0,103.0,178.0\n44,Hyundai,XG350 4dr,Sedan,Asia,Front,24589.0,22055.0,3.5,6.0,194.0,17.0,26.0,3651.0,108.0,192.0\n45,Infiniti,G35 Sport Coupe 2dr,Sedan,Asia,Rear,29795.0,27536.0,3.5,6.0,280.0,18.0,26.0,3416.0,112.0,182.0\n46,Infiniti,Q45 Luxury 4dr,Sedan,Asia,Rear,52545.0,47575.0,4.5,8.0,340.0,17.0,23.0,3977.0,113.0,200.0\n47,Isuzu,Rodeo S,SUV,Asia,Front,20449.0,19261.0,3.2,6.0,193.0,17.0,21.0,3836.0,106.0,178.0\n48,Jaguar,S-Type 4.2 4dr,Sedan,Europe,Rear,49995.0,45556.0,4.2,8.0,294.0,18.0,28.0,3874.0,115.0,192.0\n49,Jaguar,XJR 4dr,Sedan,Europe,Rear,74995.0,68306.0,4.2,8.0,390.0,17.0,24.0,3948.0,119.0,200.0\n50,Jaguar,XKR convertible 2dr,Sports,Europe,Rear,86995.0,79226.0,4.2,8.0,390.0,16.0,23.0,4042.0,102.0,187.0\n51,Kia,Sorento LX,SUV,Asia,Front,19635.0,18630.0,3.5,6.0,192.0,16.0,19.0,4112.0,107.0,180.0\n52,Kia,Spectra 4dr,Sedan,Asia,Front,12360.0,11630.0,1.8,4.0,124.0,24.0,32.0,2661.0,101.0,178.0\n53,Kia,Amanti 4dr,Sedan,Asia,Front,26000.0,23764.0,3.5,6.0,195.0,17.0,25.0,4021.0,110.0,196.0\n54,Land Rover,Discovery SE,SUV,Europe,All,39250.0,35777.0,4.6,8.0,217.0,12.0,16.0,4576.0,100.0,185.0\n55,Lexus,RX 330,SUV,Asia,All,39195.0,34576.0,3.3,6.0,230.0,18.0,24.0,4065.0,107.0,186.0\n56,Lexus,GS 300 4dr,Sedan,Asia,Rear,41010.0,36196.0,3.0,6.0,220.0,18.0,25.0,3649.0,110.0,189.0\n57,Lexus,IS 300 
SportCross,Wagon,Asia,Rear,32455.0,28647.0,3.0,6.0,215.0,18.0,24.0,3410.0,105.0,177.0\n58,Lincoln,LS V6 Premium 4dr,Sedan,USA,Rear,36895.0,33929.0,3.0,6.0,232.0,20.0,26.0,3681.0,115.0,194.0\n59,Lincoln,Town Car Ultimate 4dr,Sedan,USA,Rear,44925.0,41217.0,4.6,8.0,239.0,17.0,25.0,4369.0,118.0,215.0\n60,Mazda,Tribute DX 2.0,SUV,Asia,All,21087.0,19742.0,2.0,4.0,130.0,22.0,25.0,3091.0,103.0,173.0\n61,Mazda,MPV ES,Sedan,Asia,Front,28750.0,26600.0,3.0,6.0,200.0,18.0,25.0,3812.0,112.0,188.0\n62,Mazda,RX-8 4dr manual,Sports,Asia,Rear,27200.0,25179.0,1.3,,238.0,18.0,24.0,3029.0,106.0,174.0\n63,Mercedes-Benz,ML500,SUV,Europe,All,46470.0,43268.0,5.0,8.0,288.0,14.0,17.0,4874.0,111.0,183.0\n64,Mercedes-Benz,C240 4dr,Sedan,Europe,All,33480.0,31187.0,2.6,6.0,168.0,19.0,25.0,3360.0,107.0,178.0\n65,Mercedes-Benz,C32 AMG 4dr,Sedan,Europe,Rear,52120.0,48522.0,3.2,6.0,349.0,16.0,21.0,3540.0,107.0,178.0\n66,Mercedes-Benz,CLK500 coupe 2dr (convertible),Sedan,Europe,Rear,52800.0,49104.0,5.0,8.0,302.0,17.0,22.0,3585.0,107.0,183.0\n67,Mercedes-Benz,S500 4dr,Sedan,Europe,All,86970.0,80939.0,5.0,8.0,302.0,16.0,24.0,4390.0,122.0,203.0\n68,Mercedes-Benz,SLK230 convertible 2dr,Sports,Europe,Rear,40320.0,37548.0,2.3,4.0,192.0,21.0,29.0,3055.0,95.0,158.0\n69,Mercedes-Benz,E500,Wagon,Europe,All,60670.0,56474.0,5.0,8.0,302.0,16.0,24.0,4230.0,112.0,190.0\n70,Mercury,Grand Marquis LS Premium 4dr,Sedan,USA,Rear,29595.0,27148.0,4.6,8.0,224.0,17.0,25.0,4052.0,115.0,212.0\n71,Mercury,Monterey Luxury,Sedan,USA,Front,33995.0,30846.0,4.2,6.0,201.0,16.0,23.0,4340.0,121.0,202.0\n72,Mitsubishi,Outlander LS,SUV,Asia,Front,18892.0,17569.0,2.4,4.0,160.0,21.0,27.0,3240.0,103.0,179.0\n73,Mitsubishi,Lancer OZ Rally 4dr auto,Sedan,Asia,Front,17232.0,16196.0,2.0,4.0,120.0,25.0,31.0,2744.0,102.0,181.0\n74,Mitsubishi,Eclipse Spyder GT convertible 2dr,Sports,Asia,Front,26992.0,25218.0,3.0,6.0,210.0,21.0,28.0,3296.0,101.0,177.0\n75,Nissan,Pathfinder 
SE,SUV,Asia,Front,27339.0,25972.0,3.5,6.0,240.0,16.0,21.0,3871.0,106.0,183.0\n76,Nissan,Altima S 4dr,Sedan,Asia,Front,19240.0,18030.0,2.5,4.0,175.0,21.0,26.0,3039.0,110.0,192.0\n77,Nissan,Maxima SL 4dr,Sedan,Asia,Front,29440.0,26966.0,3.5,6.0,265.0,20.0,28.0,3476.0,111.0,194.0\n78,Nissan,350Z Enthusiast convertible 2dr,Sports,Asia,Rear,34390.0,31845.0,3.5,6.0,287.0,20.0,26.0,3428.0,104.0,169.0\n79,Oldsmobile,Alero GX 2dr,Sedan,USA,Front,18825.0,17642.0,2.2,4.0,140.0,24.0,32.0,2946.0,107.0,187.0\n80,Pontiac,Sunfire 1SA 2dr,Sedan,USA,Front,15495.0,14375.0,2.2,4.0,140.0,24.0,33.0,2771.0,104.0,182.0\n81,Pontiac,Grand Prix GT2 4dr,Sedan,USA,Front,24295.0,22284.0,3.8,6.0,200.0,20.0,30.0,3484.0,111.0,198.0\n82,Pontiac,GTO 2dr,Sports,USA,Rear,33500.0,30710.0,5.7,8.0,340.0,16.0,20.0,3725.0,110.0,190.0\n83,Porsche,911 Carrera 4S coupe 2dr (convert),Sports,Europe,All,84165.0,72206.0,3.6,6.0,315.0,17.0,24.0,3240.0,93.0,175.0\n84,Porsche,Boxster S convertible 2dr,Sports,Europe,Rear,52365.0,45766.0,3.2,6.0,258.0,18.0,26.0,2911.0,95.0,170.0\n85,Saab,9-5 Aero 4dr,Sedan,Europe,Front,39465.0,37721.0,2.3,4.0,250.0,21.0,29.0,3470.0,106.0,190.0\n86,Saturn,VUE,SUV,USA,All,20585.0,19238.0,2.2,4.0,143.0,21.0,26.0,3381.0,107.0,181.0\n87,Saturn,lon2 quad coupe 2dr,Sedan,USA,Front,14850.0,13904.0,2.2,4.0,140.0,26.0,35.0,2751.0,103.0,185.0\n88,Scion,xA 4dr hatch,Sedan,Asia,Front,12965.0,12340.0,1.5,4.0,108.0,32.0,38.0,2340.0,93.0,154.0\n89,Subaru,Legacy GT 4dr,Sedan,Asia,All,25645.0,23336.0,2.5,4.0,165.0,21.0,28.0,3395.0,104.0,184.0\n90,Subaru,Impreza WRX 4dr,Sports,Asia,All,25045.0,23022.0,2.0,4.0,227.0,20.0,27.0,3085.0,99.0,174.0\n91,Subaru,Outback,Wagon,Asia,All,23895.0,21773.0,2.5,4.0,165.0,21.0,28.0,3430.0,104.0,187.0\n92,Suzuki,Aerio LX 4dr,Sedan,Asia,Front,14500.0,14317.0,2.3,4.0,155.0,25.0,31.0,2676.0,98.0,171.0\n93,Suzuki,Aerio SX,Wagon,Asia,All,16497.0,16291.0,2.3,4.0,155.0,24.0,29.0,2932.0,98.0,167.0\n94,Toyota,Highlander 
V6,SUV,Asia,All,27930.0,24915.0,3.3,6.0,230.0,18.0,24.0,3935.0,107.0,185.0\n95,Toyota,Corolla S 4dr,Sedan,Asia,Front,15030.0,13650.0,1.8,4.0,130.0,32.0,40.0,2524.0,102.0,178.0\n96,Toyota,Echo 4dr,Sedan,Asia,Front,11290.0,10642.0,1.5,4.0,108.0,35.0,43.0,2055.0,93.0,163.0\n97,Toyota,Camry Solara SE V6 2dr,Sedan,Asia,Front,21965.0,19819.0,3.3,6.0,225.0,20.0,29.0,3417.0,107.0,193.0\n98,Toyota,Avalon XLS 4dr,Sedan,Asia,Front,30920.0,27271.0,3.0,6.0,210.0,21.0,29.0,3439.0,107.0,192.0\n99,Toyota,MR2 Spyder convertible 2dr,Sports,Asia,Rear,25130.0,22787.0,1.8,4.0,138.0,26.0,32.0,2195.0,97.0,153.0\n100,Toyota,Matrix XR,Wagon,Asia,Front,16695.0,15156.0,1.8,4.0,130.0,29.0,36.0,2679.0,102.0,171.0\n101,Volkswagen,Jetta GLS TDI 4dr,Sedan,Europe,Front,21055.0,19638.0,1.9,4.0,100.0,38.0,46.0,3003.0,99.0,172.0\n102,Volkswagen,Passat GLS 4dr,Sedan,Europe,Front,23955.0,21898.0,1.8,4.0,170.0,22.0,31.0,3241.0,106.0,185.0\n103,Volkswagen,Phaeton W12 4dr,Sedan,Europe,Front,75000.0,69130.0,6.0,12.0,420.0,12.0,19.0,5399.0,118.0,204.0\n104,Volvo,XC90 T6,SUV,Europe,All,41250.0,38851.0,2.9,6.0,268.0,15.0,20.0,4638.0,113.0,189.0\n105,Volvo,S60 R 4dr,Sedan,Europe,All,37560.0,35382.0,2.5,5.0,300.0,18.0,25.0,3571.0,107.0,181.0\n106,Volvo,C70 HPT convertible 2dr,Sedan,Europe,Front,42565.0,40083.0,2.3,5.0,242.0,20.0,26.0,3450.0,105.0,186.0\n107,Acura,RSX Type S 2dr,Sedan,Asia,Front,23820.0,21761.0,2.0,4.0,200.0,24.0,31.0,2778.0,101.0,172.0\n108,Acura,3.5 RL w/Navigation 4dr,Sedan,Asia,Front,46100.0,41100.0,3.5,6.0,225.0,18.0,24.0,3893.0,115.0,197.0\n109,Audi,A4 3.0 4dr,Sedan,Europe,Front,31840.0,28846.0,3.0,6.0,220.0,20.0,28.0,3462.0,104.0,179.0\n110,Audi,A6 3.0 Quattro 4dr,Sedan,Europe,All,39640.0,35992.0,3.0,6.0,220.0,18.0,25.0,3880.0,109.0,192.0\n111,Audi,A6 4.2 Quattro 4dr,Sedan,Europe,All,49690.0,44936.0,4.2,8.0,300.0,17.0,24.0,4024.0,109.0,193.0\n112,Audi,TT 1.8 convertible 2dr (coupe),Sports,Europe,Front,35940.0,32512.0,1.8,4.0,180.0,20.0,28.0,3131.0,95.0,159.0\n113,Audi,S4 Avant 
Quattro,Wagon,Europe,All,49090.0,44446.0,4.2,8.0,340.0,15.0,21.0,3936.0,104.0,179.0\n114,BMW,325Ci 2dr,Sedan,Europe,Rear,30795.0,28245.0,2.5,6.0,184.0,20.0,29.0,3197.0,107.0,177.0\n115,BMW,330Ci 2dr,Sedan,Europe,Rear,36995.0,33890.0,3.0,6.0,225.0,20.0,30.0,3285.0,107.0,176.0\n116,BMW,530i 4dr,Sedan,Europe,Rear,44995.0,41170.0,3.0,6.0,225.0,20.0,30.0,3472.0,114.0,191.0\n117,BMW,M3 coupe 2dr,Sports,Europe,Rear,48195.0,44170.0,3.2,6.0,333.0,16.0,24.0,3415.0,108.0,177.0\n118,BMW,325xi Sport,Wagon,Europe,All,32845.0,30110.0,2.5,6.0,184.0,19.0,26.0,3594.0,107.0,176.0\n119,Buick,LeSabre Custom 4dr,Sedan,USA,Front,26470.0,24282.0,3.8,6.0,205.0,20.0,29.0,3567.0,112.0,200.0\n120,Buick,Park Avenue 4dr,Sedan,USA,Front,35545.0,32244.0,3.8,6.0,205.0,20.0,29.0,3778.0,114.0,207.0\n121,Cadillac,CTS VVT 4dr,Sedan,USA,Rear,30835.0,28575.0,3.6,6.0,255.0,18.0,25.0,3694.0,113.0,190.0\n122,Cadillac,XLR convertible 2dr,Sports,USA,Rear,76200.0,70546.0,4.6,8.0,320.0,17.0,25.0,3647.0,106.0,178.0\n123,Chevrolet,TrailBlazer LT,SUV,USA,Front,30295.0,27479.0,4.2,6.0,275.0,16.0,21.0,4425.0,113.0,192.0\n124,Chevrolet,Cavalier 2dr,Sedan,USA,Front,14610.0,13697.0,2.2,4.0,140.0,26.0,37.0,2617.0,104.0,183.0\n125,Chevrolet,Malibu 4dr,Sedan,USA,Front,18995.0,17434.0,2.2,4.0,145.0,24.0,34.0,3174.0,106.0,188.0\n126,Chevrolet,Impala SS 4dr,Sedan,USA,Front,27995.0,25672.0,3.8,6.0,240.0,18.0,28.0,3606.0,111.0,200.0\n127,Chevrolet,Venture LS,Sedan,USA,Front,27020.0,24518.0,3.4,6.0,185.0,19.0,26.0,3699.0,112.0,187.0\n128,Chevrolet,Colorado Z85,Truck,USA,All,18760.0,17070.0,2.8,4.0,175.0,18.0,23.0,3623.0,111.0,192.0\n129,Chevrolet,Malibu Maxx LS,Wagon,USA,Front,22225.0,20394.0,3.5,6.0,200.0,22.0,30.0,3458.0,112.0,188.0\n130,Chrysler,Sebring Touring 4dr,Sedan,USA,Front,21840.0,20284.0,2.7,6.0,200.0,21.0,28.0,3222.0,108.0,191.0\n131,Chrysler,PT Cruiser GT 4dr,Sedan,USA,Front,25955.0,24172.0,2.4,4.0,220.0,21.0,27.0,3217.0,103.0,169.0\n132,Chrysler,Town and Country 
LX,Sedan,USA,Front,27490.0,25371.0,3.3,6.0,180.0,19.0,26.0,4068.0,119.0,201.0\n133,Dodge,Durango SLT,SUV,USA,All,32235.0,29472.0,4.7,8.0,230.0,15.0,21.0,4987.0,119.0,201.0\n134,Dodge,Stratus SXT 4dr,Sedan,USA,Front,18820.0,17512.0,2.4,4.0,150.0,21.0,28.0,3182.0,108.0,191.0\n135,Dodge,Grand Caravan SXT,Sedan,USA,All,32660.0,29812.0,3.8,6.0,215.0,18.0,25.0,4440.0,119.0,201.0\n136,Dodge,Ram 1500 Regular Cab ST,Truck,USA,Rear,20215.0,18076.0,3.7,6.0,215.0,16.0,21.0,4542.0,121.0,208.0\n137,Ford,Escape XLS,SUV,USA,All,22515.0,20907.0,3.0,6.0,201.0,18.0,23.0,3346.0,103.0,173.0\n138,Ford,Focus ZX5 5dr,Sedan,USA,Front,15580.0,14607.0,2.0,4.0,130.0,26.0,33.0,2691.0,103.0,168.0\n139,Ford,Crown Victoria 4dr,Sedan,USA,Rear,24345.0,22856.0,4.6,8.0,224.0,17.0,25.0,4057.0,115.0,212.0\n140,Ford,Mustang 2dr (convertible),Sports,USA,Rear,18345.0,16943.0,3.8,6.0,193.0,20.0,29.0,3290.0,101.0,183.0\n141,Ford,F-150 Supercab Lariat,Truck,USA,All,33540.0,29405.0,5.4,8.0,300.0,14.0,18.0,5464.0,133.0,218.0\n142,GMC,Envoy XUV SLE,SUV,USA,Front,31890.0,28922.0,4.2,6.0,275.0,15.0,19.0,4945.0,129.0,208.0\n143,GMC,Canyon Z85 SL Regular Cab,Truck,USA,Rear,16530.0,14877.0,2.8,4.0,175.0,18.0,25.0,3351.0,111.0,192.0\n144,Honda,Civic Hybrid 4dr manual (gas/electric),Hybrid,Asia,Front,20140.0,18451.0,1.4,4.0,93.0,46.0,51.0,2732.0,103.0,175.0\n145,Honda,Element LX,SUV,Asia,All,18690.0,17334.0,2.4,4.0,160.0,21.0,24.0,3468.0,101.0,167.0\n146,Honda,Accord LX 2dr,Sedan,Asia,Front,19860.0,17924.0,2.4,4.0,160.0,26.0,34.0,2994.0,105.0,188.0\n147,Honda,Accord LX V6 4dr,Sedan,Asia,Front,23760.0,21428.0,3.0,6.0,240.0,21.0,30.0,3349.0,108.0,190.0\n148,Honda,S2000 convertible 2dr,Sports,Asia,Rear,33260.0,29965.0,2.2,4.0,240.0,20.0,25.0,2835.0,95.0,162.0\n149,Hyundai,Accent GL 4dr,Sedan,Asia,Front,11839.0,11116.0,1.6,4.0,103.0,29.0,33.0,2290.0,96.0,167.0\n150,Hyundai,Elantra GT 4dr hatch,Sedan,Asia,Front,15389.0,14207.0,2.0,4.0,138.0,26.0,34.0,2698.0,103.0,178.0\n151,Hyundai,XG350 L 
4dr,Sedan,Asia,Front,26189.0,23486.0,3.5,6.0,194.0,17.0,26.0,3651.0,108.0,192.0\n152,Infiniti,G35 4dr,Sedan,Asia,All,32445.0,29783.0,3.5,6.0,260.0,18.0,26.0,3677.0,112.0,187.0\n153,Infiniti,FX35,Wagon,Asia,Rear,34895.0,31756.0,3.5,6.0,280.0,16.0,22.0,4056.0,112.0,189.0\n154,Jaguar,X-Type 2.5 4dr,Sedan,Europe,All,29995.0,27355.0,2.5,6.0,192.0,18.0,26.0,3428.0,107.0,184.0\n155,Jaguar,S-Type R 4dr,Sedan,Europe,Rear,63120.0,57499.0,4.2,8.0,390.0,17.0,24.0,4046.0,115.0,192.0\n156,Jaguar,XK8 coupe 2dr,Sports,Europe,Rear,69995.0,63756.0,4.2,8.0,294.0,18.0,26.0,3779.0,102.0,187.0\n157,Jeep,Grand Cherokee Laredo,SUV,USA,Front,27905.0,25686.0,4.0,6.0,195.0,16.0,21.0,3790.0,106.0,181.0\n158,Kia,Optima LX 4dr,Sedan,Asia,Front,16040.0,14910.0,2.4,4.0,138.0,23.0,30.0,3281.0,106.0,186.0\n159,Kia,Spectra GS 4dr hatch,Sedan,Asia,Front,13580.0,12830.0,1.8,4.0,124.0,24.0,32.0,2686.0,101.0,178.0\n160,Kia,Sedona LX,Sedan,Asia,Front,20615.0,19400.0,3.5,6.0,195.0,16.0,22.0,4802.0,115.0,194.0\n161,Land Rover,Freelander SE,SUV,Europe,All,25995.0,23969.0,2.5,6.0,174.0,18.0,21.0,3577.0,101.0,175.0\n162,Lexus,ES 330 4dr,Sedan,Asia,Front,32350.0,28755.0,3.3,6.0,225.0,20.0,29.0,3460.0,107.0,191.0\n163,Lexus,GS 430 4dr,Sedan,Asia,Rear,48450.0,42232.0,4.3,8.0,300.0,18.0,23.0,3715.0,110.0,189.0\n164,Lincoln,Navigator Luxury,SUV,USA,All,52775.0,46360.0,5.4,8.0,300.0,13.0,18.0,5969.0,119.0,206.0\n165,Lincoln,LS V8 Sport 4dr,Sedan,USA,Rear,40095.0,36809.0,3.9,8.0,280.0,17.0,24.0,3768.0,115.0,194.0\n166,Lincoln,Town Car Ultimate L 4dr,Sedan,USA,Rear,50470.0,46208.0,4.6,8.0,239.0,17.0,25.0,4474.0,124.0,221.0\n167,Mazda,Mazda3 i 4dr,Sedan,Asia,Front,15500.0,14525.0,2.0,4.0,148.0,26.0,34.0,2696.0,104.0,178.0\n168,Mazda,MX-5 Miata convertible 2dr,Sports,Asia,Rear,22388.0,20701.0,1.8,4.0,142.0,23.0,28.0,2387.0,89.0,156.0\n169,Mazda,B2300 SX Regular Cab,Truck,Asia,Rear,14840.0,14070.0,2.3,4.0,143.0,24.0,29.0,2960.0,112.0,188.0\n170,Mercedes-Benz,C230 Sport 
2dr,Sedan,Europe,Rear,26060.0,24249.0,1.8,4.0,189.0,22.0,30.0,3250.0,107.0,178.0\n171,Mercedes-Benz,C320 Sport 4dr,Sedan,Europe,Rear,35920.0,33456.0,3.2,6.0,215.0,19.0,26.0,3430.0,107.0,178.0\n172,Mercedes-Benz,CL500 2dr,Sedan,Europe,Rear,94820.0,88324.0,5.0,8.0,302.0,16.0,24.0,4085.0,114.0,196.0\n173,Mercedes-Benz,E320 4dr,Sedan,Europe,Rear,48170.0,44849.0,3.2,6.0,221.0,19.0,27.0,3635.0,112.0,190.0\n174,Mercedes-Benz,SL500 convertible 2dr,Sports,Europe,Rear,90520.0,84325.0,5.0,8.0,302.0,16.0,23.0,4065.0,101.0,179.0\n175,Mercedes-Benz,SLK32 AMG 2dr,Sports,Europe,Rear,56170.0,52289.0,3.2,6.0,349.0,17.0,22.0,3220.0,95.0,158.0\n176,Mercury,Mountaineer,SUV,USA,Front,29995.0,27317.0,4.0,6.0,210.0,16.0,21.0,4374.0,114.0,190.0\n177,Mercury,Sable LS Premium 4dr,Sedan,USA,Front,23895.0,21918.0,3.0,6.0,201.0,19.0,26.0,3315.0,109.0,200.0\n178,Mercury,Sable GS,Wagon,USA,Front,22595.0,20748.0,3.0,6.0,155.0,19.0,26.0,3488.0,109.0,198.0\n179,Mitsubishi,Lancer ES 4dr,Sedan,Asia,Front,14622.0,13751.0,2.0,4.0,120.0,25.0,31.0,2656.0,102.0,181.0\n180,Mitsubishi,Diamante LS 4dr,Sedan,Asia,Front,29282.0,27250.0,3.5,6.0,205.0,18.0,25.0,3549.0,107.0,194.0\n181,Mitsubishi,Lancer Evolution 4dr,Sports,Asia,Front,29562.0,27466.0,2.0,4.0,271.0,18.0,26.0,3263.0,103.0,179.0\n182,Nissan,Xterra XE V6,SUV,Asia,Front,20939.0,19512.0,3.3,6.0,180.0,17.0,20.0,3760.0,104.0,178.0\n183,Nissan,Sentra SE-R 4dr,Sedan,Asia,Front,17640.0,16444.0,2.5,4.0,165.0,23.0,28.0,2761.0,100.0,178.0\n184,Nissan,Quest S,Sedan,Asia,Front,24780.0,22958.0,3.5,6.0,240.0,19.0,26.0,4012.0,124.0,204.0\n185,Nissan,Frontier King Cab XE V6,Truck,Asia,All,19479.0,18253.0,3.3,6.0,180.0,17.0,20.0,3932.0,116.0,191.0\n186,Oldsmobile,Alero GLS 2dr,Sedan,USA,Front,23675.0,21485.0,3.4,6.0,170.0,20.0,29.0,3085.0,107.0,187.0\n187,Pontiac,Grand Am GT 2dr,Sedan,USA,Front,22450.0,20595.0,3.4,6.0,175.0,20.0,29.0,3118.0,107.0,186.0\n188,Pontiac,Bonneville GXP 
4dr,Sedan,USA,Front,35995.0,32997.0,4.6,8.0,275.0,17.0,20.0,3790.0,112.0,203.0\n189,Pontiac,Vibe,Wagon,USA,Rear,17045.0,15973.0,1.8,4.0,130.0,29.0,36.0,2701.0,102.0,172.0\n190,Porsche,911 Targa coupe 2dr,Sports,Europe,Rear,76765.0,67128.0,3.6,6.0,315.0,18.0,26.0,3119.0,93.0,175.0\n191,Saab,9-3 Arc Sport 4dr,Sedan,Europe,Front,30860.0,29269.0,2.0,4.0,210.0,20.0,28.0,3175.0,105.0,183.0\n192,Saab,9-3 Arc convertible 2dr,Sedan,Europe,Front,40670.0,38520.0,2.0,4.0,210.0,21.0,29.0,3480.0,105.0,182.0\n193,Saturn,Ion1 4dr,Sedan,USA,Front,10995.0,10319.0,2.2,4.0,140.0,26.0,35.0,2692.0,103.0,185.0\n194,Saturn,lon3 quad coupe 2dr,Sedan,USA,Front,16350.0,15299.0,2.2,4.0,140.0,26.0,35.0,2751.0,103.0,185.0\n195,Scion,xB,Wagon,Asia,Front,14165.0,13480.0,1.5,4.0,108.0,31.0,35.0,2425.0,98.0,155.0\n196,Subaru,Outback Limited Sedan 4dr,Sedan,Asia,All,27145.0,24687.0,2.5,4.0,165.0,20.0,27.0,3495.0,104.0,184.0\n197,Subaru,Impreza WRX STi 4dr,Sports,Asia,All,31545.0,29130.0,2.5,4.0,300.0,18.0,24.0,3263.0,100.0,174.0\n198,Suzuki,XL-7 EX,SUV,Asia,Front,23699.0,22307.0,2.7,6.0,185.0,18.0,22.0,3682.0,110.0,187.0\n199,Suzuki,Forenza S 4dr,Sedan,Asia,Front,12269.0,12116.0,2.0,4.0,119.0,24.0,31.0,2701.0,102.0,177.0\n200,Toyota,Prius 4dr (gas/electric),Hybrid,Asia,Front,20510.0,18926.0,1.5,4.0,110.0,59.0,51.0,2890.0,106.0,175.0\n201,Toyota,Land Cruiser,SUV,Asia,All,54765.0,47986.0,4.7,8.0,325.0,13.0,17.0,5390.0,112.0,193.0\n202,Toyota,Corolla LE 4dr,Sedan,Asia,Front,15295.0,13889.0,1.8,4.0,130.0,32.0,40.0,2524.0,102.0,178.0\n203,Toyota,Camry LE 4dr,Sedan,Asia,Front,19560.0,17558.0,2.4,4.0,157.0,24.0,33.0,3086.0,107.0,189.0\n204,Toyota,Avalon XL 4dr,Sedan,Asia,Front,26560.0,23693.0,3.0,6.0,210.0,21.0,29.0,3417.0,107.0,192.0\n205,Toyota,Sienna CE,Sedan,Asia,Front,23495.0,21198.0,3.3,6.0,230.0,19.0,27.0,4120.0,119.0,200.0\n206,Toyota,Tacoma,Truck,Asia,Rear,12800.0,11879.0,2.4,4.0,142.0,22.0,27.0,2750.0,103.0,191.0\n207,Volkswagen,Touareg 
V6,SUV,Europe,All,35515.0,32243.0,3.2,6.0,220.0,15.0,20.0,5086.0,112.0,187.0\n208,Volkswagen,New Beetle GLS 1.8T 2dr,Sedan,Europe,Front,21055.0,19638.0,1.8,4.0,150.0,24.0,31.0,2820.0,99.0,161.0\n209,Volkswagen,Passat GLX V6 4MOTION 4dr,Sedan,Europe,Front,33180.0,30583.0,2.8,6.0,190.0,19.0,26.0,3721.0,106.0,185.0\n210,Volkswagen,Jetta GL,Wagon,Europe,Front,19005.0,17427.0,2.0,4.0,115.0,24.0,30.0,3034.0,99.0,174.0\n211,Volvo,S40 4dr,Sedan,Europe,Front,25135.0,23701.0,1.9,4.0,170.0,22.0,29.0,2767.0,101.0,178.0\n212,Volvo,S80 2.9 4dr,Sedan,Europe,Front,37730.0,35542.0,2.9,6.0,208.0,20.0,28.0,3576.0,110.0,190.0\n213,Volvo,S80 T6 4dr,Sedan,Europe,Front,45210.0,42573.0,2.9,6.0,268.0,19.0,26.0,3653.0,110.0,190.0\n214,Acura,TSX 4dr,Sedan,Asia,Front,26990.0,24647.0,2.4,4.0,200.0,22.0,29.0,3230.0,105.0,183.0\n215,Acura,NSX coupe 2dr manual S,Sports,Asia,Rear,89765.0,79978.0,3.2,6.0,290.0,17.0,24.0,3153.0,100.0,174.0\n216,Audi,A4 3.0 Quattro 4dr manual,Sedan,Europe,All,33430.0,30366.0,3.0,6.0,220.0,17.0,26.0,3583.0,104.0,179.0\n217,Audi,A4 3.0 convertible 2dr,Sedan,Europe,Front,42490.0,38325.0,3.0,6.0,220.0,20.0,27.0,3814.0,105.0,180.0\n218,Audi,A8 L Quattro 4dr,Sedan,Europe,All,69190.0,64740.0,4.2,8.0,330.0,17.0,24.0,4399.0,121.0,204.0\n219,Audi,TT 1.8 Quattro 2dr (convertible),Sports,Europe,All,37390.0,33891.0,1.8,4.0,225.0,20.0,28.0,2921.0,96.0,159.0\n220,BMW,X3 3.0i,SUV,Europe,All,37000.0,33873.0,3.0,6.0,225.0,16.0,23.0,4023.0,110.0,180.0\n221,BMW,325Ci convertible 2dr,Sedan,Europe,Rear,37995.0,34800.0,2.5,6.0,184.0,19.0,27.0,3560.0,107.0,177.0\n222,BMW,330xi 4dr,Sedan,Europe,All,37245.0,34115.0,3.0,6.0,225.0,20.0,29.0,3483.0,107.0,176.0\n223,BMW,545iA 4dr,Sedan,Europe,Rear,54995.0,50270.0,4.4,8.0,325.0,18.0,26.0,3814.0,114.0,191.0\n224,BMW,M3 convertible 2dr,Sports,Europe,Rear,56595.0,51815.0,3.2,6.0,333.0,16.0,23.0,3781.0,108.0,177.0\n225,Buick,Rainier,SUV,USA,All,37895.0,34357.0,4.2,6.0,275.0,15.0,21.0,4600.0,113.0,193.0\n226,Buick,Regal LS 
4dr,Sedan,USA,Front,24895.0,22835.0,3.8,6.0,200.0,20.0,30.0,3461.0,109.0,196.0\n227,Buick,Park Avenue Ultra 4dr,Sedan,USA,Front,40720.0,36927.0,3.8,6.0,240.0,18.0,28.0,3909.0,114.0,207.0\n228,Cadillac,Deville 4dr,Sedan,USA,Front,45445.0,41650.0,4.6,8.0,275.0,18.0,26.0,3984.0,115.0,207.0\n229,Cadillac,Escalade EXT,Truck,USA,All,52975.0,48541.0,6.0,8.0,345.0,13.0,17.0,5879.0,130.0,221.0\n230,Chevrolet,Tracker,SUV,USA,Front,20255.0,19108.0,2.5,6.0,165.0,19.0,22.0,2866.0,98.0,163.0\n231,Chevrolet,Cavalier 4dr,Sedan,USA,Front,14810.0,13884.0,2.2,4.0,140.0,26.0,37.0,2676.0,104.0,183.0\n232,Chevrolet,Malibu LS 4dr,Sedan,USA,Front,20370.0,18639.0,3.5,6.0,200.0,22.0,30.0,3297.0,106.0,188.0\n233,Chevrolet,Malibu LT 4dr,Sedan,USA,Front,23495.0,21551.0,3.5,6.0,200.0,23.0,32.0,3315.0,106.0,188.0\n234,Chevrolet,Corvette 2dr,Sports,USA,Rear,44535.0,39068.0,5.7,8.0,350.0,18.0,25.0,3246.0,105.0,180.0\n235,Chevrolet,Silverado 1500 Regular Cab,Truck,USA,Rear,20310.0,18480.0,4.3,6.0,200.0,15.0,21.0,4142.0,119.0,206.0\n236,Chrysler,PT Cruiser 4dr,Sedan,USA,Front,17985.0,16919.0,2.4,4.0,150.0,22.0,29.0,3101.0,103.0,169.0\n237,Chrysler,300M 4dr,Sedan,USA,Front,29865.0,27797.0,3.5,6.0,250.0,18.0,27.0,3581.0,113.0,198.0\n238,Chrysler,Sebring convertible 2dr,Sedan,USA,Front,25215.0,23451.0,2.4,4.0,150.0,22.0,30.0,3357.0,106.0,194.0\n239,Chrysler,Town and Country Limited,Sedan,USA,Front,38380.0,35063.0,3.8,6.0,215.0,18.0,25.0,4331.0,119.0,201.0\n240,Dodge,Neon SE 4dr,Sedan,USA,Front,13670.0,12849.0,2.0,4.0,132.0,29.0,36.0,2581.0,105.0,174.0\n241,Dodge,Stratus SE 4dr,Sedan,USA,Front,20220.0,18821.0,2.4,4.0,150.0,21.0,28.0,3175.0,108.0,191.0\n242,Dodge,Viper SRT-10 convertible 2dr,Sports,USA,Rear,81795.0,74451.0,8.3,10.0,500.0,12.0,20.0,3410.0,99.0,176.0\n243,Ford,Excursion 6.8 XLT,SUV,USA,All,41475.0,36494.0,6.8,10.0,310.0,10.0,13.0,7190.0,137.0,227.0\n244,Ford,Focus ZX3 2dr hatch,Sedan,USA,Front,13270.0,12482.0,2.0,4.0,130.0,26.0,33.0,2612.0,103.0,168.0\n245,Ford,Focus SVT 
2dr,Sedan,USA,Front,19135.0,17878.0,2.0,4.0,170.0,21.0,28.0,2750.0,103.0,168.0\n246,Ford,Crown Victoria LX 4dr,Sedan,USA,Rear,27370.0,25105.0,4.6,8.0,224.0,17.0,25.0,4057.0,115.0,212.0\n247,Ford,Mustang GT Premium convertible 2dr,Sports,USA,Rear,29380.0,26875.0,4.6,8.0,260.0,17.0,25.0,3347.0,101.0,183.0\n248,Ford,Ranger 2.3 XL Regular Cab,Truck,USA,Rear,14385.0,13717.0,2.3,4.0,143.0,24.0,29.0,3028.0,111.0,188.0\n249,GMC,Yukon 1500 SLE,SUV,USA,Front,35725.0,31361.0,4.8,8.0,285.0,16.0,19.0,5042.0,116.0,199.0\n250,GMC,Sierra Extended Cab 1500,Truck,USA,Rear,25717.0,22604.0,4.8,8.0,285.0,17.0,20.0,4548.0,144.0,230.0\n251,Honda,Insight 2dr (gas/electric),Hybrid,Asia,Front,19110.0,17911.0,2.0,3.0,73.0,60.0,66.0,1850.0,95.0,155.0\n252,Honda,Civic DX 2dr,Sedan,Asia,Front,13270.0,12175.0,1.7,4.0,115.0,32.0,38.0,2432.0,103.0,175.0\n253,Honda,Accord EX 2dr,Sedan,Asia,Front,22260.0,20080.0,2.4,4.0,160.0,26.0,34.0,3047.0,105.0,188.0\n254,Honda,Accord EX V6 2dr,Sedan,Asia,Front,26960.0,24304.0,3.0,6.0,240.0,21.0,30.0,3294.0,105.0,188.0\n255,Hummer,H2,SUV,USA,All,49995.0,45815.0,6.0,8.0,316.0,10.0,12.0,6400.0,123.0,190.0\n256,Hyundai,Accent GT 2dr hatch,Sedan,Asia,Front,11939.0,11209.0,1.6,4.0,103.0,29.0,33.0,2339.0,96.0,167.0\n257,Hyundai,Sonata GLS 4dr,Sedan,Asia,Front,19339.0,17574.0,2.7,6.0,170.0,19.0,27.0,3217.0,106.0,187.0\n258,Hyundai,Tiburon GT V6 2dr,Sports,Asia,Front,18739.0,17101.0,2.7,6.0,172.0,19.0,26.0,3023.0,100.0,173.0\n259,Infiniti,I35 4dr,Sedan,Asia,Front,31145.0,28320.0,3.5,6.0,255.0,19.0,26.0,3306.0,108.0,194.0\n260,Infiniti,FX45,Wagon,Asia,All,36395.0,33121.0,4.5,8.0,315.0,15.0,19.0,4309.0,112.0,189.0\n261,Jaguar,X-Type 3.0 4dr,Sedan,Europe,All,33995.0,30995.0,3.0,6.0,227.0,18.0,25.0,3516.0,107.0,184.0\n262,Jaguar,Vanden Plas 4dr,Sedan,Europe,Rear,68995.0,62846.0,4.2,8.0,294.0,18.0,28.0,3803.0,119.0,200.0\n263,Jaguar,XK8 convertible 2dr,Sports,Europe,Rear,74995.0,68306.0,4.2,8.0,294.0,18.0,26.0,3980.0,102.0,187.0\n264,Jeep,Liberty 
Sport,SUV,USA,All,20130.0,18973.0,2.4,4.0,150.0,20.0,24.0,3826.0,104.0,174.0\n265,Kia,Rio 4dr manual,Sedan,Asia,Front,10280.0,9875.0,1.6,4.0,104.0,26.0,33.0,2403.0,95.0,167.0\n266,Kia,Spectra GSX 4dr hatch,Sedan,Asia,Front,14630.0,13790.0,1.8,4.0,124.0,24.0,32.0,2697.0,101.0,178.0\n267,Kia,Rio Cinco,Wagon,Asia,Front,11905.0,11410.0,1.6,4.0,104.0,26.0,33.0,2447.0,95.0,167.0\n268,Lexus,GX 470,SUV,Asia,All,45700.0,39838.0,4.7,8.0,235.0,15.0,19.0,4740.0,110.0,188.0\n269,Lexus,IS 300 4dr manual,Sedan,Asia,Rear,31045.0,27404.0,3.0,6.0,215.0,18.0,25.0,3255.0,105.0,177.0\n270,Lexus,LS 430 4dr,Sedan,Asia,Rear,55750.0,48583.0,4.3,8.0,290.0,18.0,25.0,3990.0,115.0,197.0\n271,Lincoln,Aviator Ultimate,SUV,USA,Front,42915.0,39443.0,4.6,8.0,302.0,13.0,18.0,4834.0,114.0,193.0\n272,Lincoln,LS V8 Ultimate 4dr,Sedan,USA,Rear,43495.0,39869.0,3.9,8.0,280.0,17.0,24.0,3768.0,115.0,194.0\n273,MINI,Cooper,Sedan,Europe,Front,16999.0,15437.0,1.6,4.0,115.0,28.0,37.0,2524.0,97.0,143.0\n274,Mazda,Mazda3 s 4dr,Sedan,Asia,Front,17200.0,15922.0,2.3,4.0,160.0,25.0,31.0,2762.0,104.0,179.0\n275,Mazda,MX-5 Miata LS convertible 2dr,Sports,Asia,Rear,25193.0,23285.0,1.8,4.0,142.0,23.0,28.0,2387.0,89.0,156.0\n276,Mazda,B4000 SE Cab Plus,Truck,Asia,All,22350.0,20482.0,4.0,6.0,207.0,15.0,19.0,3571.0,126.0,203.0\n277,Mercedes-Benz,C320 Sport 2dr,Sedan,Europe,Rear,28370.0,26435.0,3.2,6.0,215.0,19.0,26.0,3430.0,107.0,178.0\n278,Mercedes-Benz,C320 4dr,Sedan,Europe,Rear,37630.0,35046.0,3.2,6.0,215.0,20.0,26.0,3450.0,107.0,178.0\n279,Mercedes-Benz,CL600 2dr,Sedan,Europe,Rear,128420.0,119600.0,5.5,12.0,493.0,13.0,19.0,4473.0,114.0,196.0\n280,Mercedes-Benz,E500 4dr,Sedan,Europe,Rear,57270.0,53382.0,5.0,8.0,302.0,16.0,20.0,3815.0,112.0,190.0\n281,Mercedes-Benz,SL55 AMG 2dr,Sports,Europe,Rear,121770.0,113388.0,5.5,8.0,493.0,14.0,21.0,4235.0,101.0,179.0\n282,Mercedes-Benz,C240,Wagon,Europe,Rear,33780.0,31466.0,2.6,6.0,168.0,19.0,25.0,3470.0,107.0,179.0\n283,Mercury,Sable GS 
4dr,Sedan,USA,Front,21595.0,19848.0,3.0,6.0,155.0,20.0,27.0,3308.0,109.0,200.0\n284,Mercury,Grand Marquis LS Ultimate 4dr,Sedan,USA,Rear,30895.0,28318.0,4.6,8.0,224.0,17.0,25.0,4052.0,115.0,212.0\n285,Mitsubishi,Endeavor XLS,SUV,Asia,All,30492.0,28330.0,3.8,6.0,215.0,17.0,21.0,4134.0,109.0,190.0\n286,Mitsubishi,Lancer LS 4dr,Sedan,Asia,Front,16722.0,15718.0,2.0,4.0,120.0,25.0,31.0,2795.0,102.0,181.0\n287,Mitsubishi,Galant GTS 4dr,Sedan,Asia,Front,25700.0,23883.0,3.8,6.0,230.0,18.0,26.0,3649.0,108.0,191.0\n288,Mitsubishi,Lancer Sportback LS,Wagon,Asia,Front,17495.0,16295.0,2.4,4.0,160.0,25.0,31.0,3020.0,102.0,181.0\n289,Nissan,Sentra 1.8 4dr,Sedan,Asia,Front,12740.0,12205.0,1.8,4.0,126.0,28.0,35.0,2513.0,100.0,178.0\n290,Nissan,Altima SE 4dr,Sedan,Asia,Front,23290.0,21580.0,3.5,6.0,245.0,21.0,26.0,3197.0,110.0,192.0\n291,Nissan,Quest SE,Sedan,Asia,Front,32780.0,30019.0,3.5,6.0,240.0,18.0,25.0,4175.0,124.0,204.0\n292,Nissan,Titan King Cab XE,Truck,Asia,All,26650.0,24926.0,5.6,8.0,305.0,14.0,18.0,5287.0,140.0,224.0\n293,Oldsmobile,Silhouette GL,Sedan,USA,Front,28790.0,26120.0,3.4,6.0,185.0,19.0,26.0,3948.0,120.0,201.0\n294,Pontiac,Grand Prix GT1 4dr,Sedan,USA,Front,22395.0,20545.0,3.8,6.0,200.0,20.0,30.0,3477.0,111.0,198.0\n295,Pontiac,Montana,Sedan,USA,Front,23845.0,21644.0,3.4,6.0,185.0,19.0,26.0,3803.0,112.0,187.0\n296,Porsche,Cayenne S,SUV,Europe,All,56665.0,49865.0,4.5,8.0,340.0,14.0,18.0,4950.0,112.0,188.0\n297,Porsche,911 GT2 2dr,Sports,Europe,Rear,192465.0,173560.0,3.6,6.0,477.0,17.0,24.0,3131.0,93.0,175.0\n298,Saab,9-3 Aero 4dr,Sedan,Europe,Front,33360.0,31562.0,2.0,4.0,210.0,20.0,28.0,3175.0,105.0,183.0\n299,Saab,9-3 Aero convertible 2dr,Sedan,Europe,Front,43175.0,40883.0,2.0,4.0,210.0,21.0,30.0,3700.0,105.0,182.0\n300,Saturn,lon2 4dr,Sedan,USA,Front,14300.0,13393.0,2.2,4.0,140.0,26.0,35.0,2692.0,103.0,185.0\n301,Saturn,L300-2 4dr,Sedan,USA,Front,21410.0,19801.0,3.0,6.0,182.0,20.0,28.0,3197.0,107.0,190.0\n302,Subaru,Impreza 2.5 RS 
4dr,Sedan,Asia,All,19945.0,18399.0,2.5,4.0,165.0,22.0,28.0,2965.0,99.0,174.0\n303,Subaru,Outback H6 4dr,Sedan,Asia,All,29345.0,26660.0,3.0,6.0,212.0,19.0,26.0,3610.0,104.0,184.0\n304,Subaru,Baja,Truck,Asia,All,24520.0,22304.0,2.5,4.0,165.0,21.0,28.0,3485.0,104.0,193.0\n305,Suzuki,Vitara LX,SUV,Asia,All,17163.0,16949.0,2.5,6.0,165.0,19.0,22.0,3020.0,98.0,163.0\n306,Suzuki,Forenza EX 4dr,Sedan,Asia,Front,15568.0,15378.0,2.0,4.0,119.0,22.0,30.0,2756.0,102.0,177.0\n307,Toyota,Sequoia SR5,SUV,Asia,All,35695.0,31827.0,4.7,8.0,240.0,14.0,17.0,5270.0,118.0,204.0\n308,Toyota,RAV4,SUV,Asia,All,20290.0,18553.0,2.4,4.0,161.0,22.0,27.0,3119.0,98.0,167.0\n309,Toyota,Echo 2dr manual,Sedan,Asia,Front,10760.0,10144.0,1.5,4.0,108.0,35.0,43.0,2035.0,93.0,163.0\n310,Toyota,Camry LE V6 4dr,Sedan,Asia,Front,22775.0,20325.0,3.0,6.0,210.0,21.0,29.0,3296.0,107.0,189.0\n311,Toyota,Camry XLE V6 4dr,Sedan,Asia,Front,25920.0,23125.0,3.0,6.0,210.0,21.0,29.0,3362.0,107.0,189.0\n312,Toyota,Sienna XLE Limited,Sedan,Asia,Front,28800.0,25690.0,3.3,6.0,230.0,19.0,27.0,4165.0,119.0,200.0\n313,Toyota,Tundra Regular Cab V6,Truck,Asia,Rear,16495.0,14978.0,3.4,6.0,190.0,16.0,18.0,3925.0,128.0,218.0\n314,Volkswagen,Golf GLS 4dr,Sedan,Europe,Front,18715.0,17478.0,2.0,4.0,115.0,24.0,31.0,2897.0,99.0,165.0\n315,Volkswagen,Jetta GLI VR6 4dr,Sedan,Europe,Front,23785.0,21686.0,2.8,6.0,200.0,21.0,30.0,3179.0,99.0,172.0\n316,Volkswagen,Passat W8 4MOTION 4dr,Sedan,Europe,Front,39235.0,36052.0,4.0,8.0,270.0,18.0,25.0,3953.0,106.0,185.0\n317,Volkswagen,Passat GLS 1.8T,Wagon,Europe,Front,24955.0,22801.0,1.8,4.0,170.0,22.0,31.0,3338.0,106.0,184.0\n318,Volvo,S60 2.5 4dr,Sedan,Europe,All,31745.0,29916.0,2.5,5.0,208.0,20.0,27.0,3903.0,107.0,180.0\n319,Volvo,S80 2.5T 4dr,Sedan,Europe,All,37885.0,35688.0,2.5,5.0,194.0,20.0,27.0,3691.0,110.0,190.0\n320,Volvo,V40,Wagon,Europe,Front,26135.0,24641.0,1.9,4.0,170.0,22.0,29.0,2822.0,101.0,180.0\n321,Acura,TL 
4dr,Sedan,Asia,Front,33195.0,30299.0,3.2,6.0,270.0,20.0,28.0,3575.0,108.0,186.0\n322,Audi,A4 1.8T 4dr,Sedan,Europe,Front,25940.0,23508.0,1.8,4.0,170.0,22.0,31.0,3252.0,104.0,179.0\n323,Audi,A4 3.0 Quattro 4dr auto,Sedan,Europe,All,34480.0,31388.0,3.0,6.0,220.0,18.0,25.0,3627.0,104.0,179.0\n324,Audi,A4 3.0 Quattro convertible 2dr,Sedan,Europe,All,44240.0,40075.0,3.0,6.0,220.0,18.0,25.0,4013.0,105.0,180.0\n325,Audi,S4 Quattro 4dr,Sedan,Europe,All,48040.0,43556.0,4.2,8.0,340.0,14.0,20.0,3825.0,104.0,179.0\n326,Audi,TT 3.2 coupe 2dr (convertible),Sports,Europe,All,40590.0,36739.0,3.2,6.0,250.0,21.0,29.0,3351.0,96.0,159.0\n327,BMW,X5 4.4i,SUV,Europe,All,52195.0,47720.0,4.4,8.0,325.0,16.0,22.0,4824.0,111.0,184.0\n328,BMW,325xi 4dr,Sedan,Europe,All,30245.0,27745.0,2.5,6.0,184.0,19.0,27.0,3461.0,107.0,176.0\n329,BMW,525i 4dr,Sedan,Europe,Rear,39995.0,36620.0,2.5,6.0,184.0,19.0,28.0,3428.0,114.0,191.0\n330,BMW,745i 4dr,Sedan,Europe,Rear,69195.0,63190.0,4.4,8.0,325.0,18.0,26.0,4376.0,118.0,198.0\n331,BMW,Z4 convertible 2.5i 2dr,Sports,Europe,Rear,33895.0,31065.0,2.5,6.0,184.0,20.0,28.0,2932.0,98.0,161.0\n332,Buick,Rendezvous CX,SUV,USA,Front,26545.0,24085.0,3.4,6.0,185.0,19.0,26.0,4024.0,112.0,187.0\n333,Buick,Regal GS 4dr,Sedan,USA,Front,28345.0,26047.0,3.8,6.0,240.0,18.0,28.0,3536.0,109.0,196.0\n334,Cadillac,Escalade,SUV,USA,Front,52795.0,48377.0,5.3,8.0,295.0,14.0,18.0,5367.0,116.0,199.0\n335,Cadillac,Deville DTS 4dr,Sedan,USA,Front,50595.0,46362.0,4.6,8.0,300.0,18.0,26.0,4044.0,115.0,207.0\n336,Chevrolet,Suburban 1500 LT,SUV,USA,Front,42735.0,37422.0,5.3,8.0,295.0,14.0,18.0,4947.0,130.0,219.0\n337,Chevrolet,Aveo 4dr,Sedan,USA,Front,11690.0,10965.0,1.6,4.0,103.0,28.0,34.0,2370.0,98.0,167.0\n338,Chevrolet,Cavalier LS 2dr,Sedan,USA,Front,16385.0,15357.0,2.2,4.0,140.0,26.0,37.0,2617.0,104.0,183.0\n339,Chevrolet,Monte Carlo LS 2dr,Sedan,USA,Front,21825.0,20026.0,3.4,6.0,180.0,21.0,32.0,3340.0,111.0,198.0\n340,Chevrolet,Monte Carlo SS 
2dr,Sedan,USA,Front,24225.0,22222.0,3.8,6.0,200.0,18.0,28.0,3434.0,111.0,198.0\n341,Chevrolet,Corvette convertible 2dr,Sports,USA,Rear,51535.0,45193.0,5.7,8.0,350.0,18.0,25.0,3248.0,105.0,180.0\n342,Chevrolet,Silverado SS,Truck,USA,All,40340.0,35399.0,6.0,8.0,300.0,13.0,17.0,4804.0,144.0,238.0\n343,Chrysler,PT Cruiser Limited 4dr,Sedan,USA,Front,22000.0,20573.0,2.4,4.0,150.0,22.0,29.0,3105.0,103.0,169.0\n344,Chrysler,Concorde LX 4dr,Sedan,USA,Front,24130.0,22452.0,2.7,6.0,200.0,21.0,29.0,3479.0,113.0,208.0\n345,Chrysler,300M Special Edition 4dr,Sedan,USA,Front,33295.0,30884.0,3.5,6.0,255.0,18.0,27.0,3650.0,113.0,198.0\n346,Chrysler,Crossfire 2dr,Sports,USA,Rear,34495.0,32033.0,3.2,6.0,215.0,17.0,25.0,3060.0,95.0,160.0\n347,Dodge,Neon SXT 4dr,Sedan,USA,Front,15040.0,14086.0,2.0,4.0,132.0,29.0,36.0,2626.0,105.0,174.0\n348,Dodge,Intrepid ES 4dr,Sedan,USA,Front,24885.0,23058.0,3.5,6.0,232.0,18.0,27.0,3487.0,113.0,204.0\n349,Dodge,Dakota Regular Cab,Truck,USA,Rear,17630.0,16264.0,3.7,6.0,210.0,16.0,22.0,3714.0,112.0,193.0\n350,Ford,Expedition 4.6 XLT,SUV,USA,Front,34560.0,30468.0,4.6,8.0,232.0,15.0,19.0,5000.0,119.0,206.0\n351,Ford,Focus LX 4dr,Sedan,USA,Front,13730.0,12906.0,2.0,4.0,110.0,27.0,36.0,2606.0,103.0,168.0\n352,Ford,Taurus LX 4dr,Sedan,USA,Front,20320.0,18881.0,3.0,6.0,155.0,20.0,27.0,3306.0,109.0,198.0\n353,Ford,Crown Victoria LX Sport 4dr,Sedan,USA,Rear,30315.0,27756.0,4.6,8.0,239.0,17.0,25.0,4057.0,115.0,212.0\n354,Ford,Thunderbird Deluxe convert w/hardtop 2d,Sports,USA,Front,37530.0,34483.0,3.9,8.0,280.0,17.0,24.0,3780.0,107.0,186.0\n355,Ford,Focus ZTW,Wagon,USA,Front,17475.0,16375.0,2.0,4.0,130.0,26.0,33.0,2702.0,103.0,178.0\n356,GMC,Yukon XL 2500 SLT,SUV,USA,All,46265.0,40534.0,6.0,8.0,325.0,13.0,17.0,6133.0,130.0,219.0\n357,GMC,Sierra HD 2500,Truck,USA,All,29322.0,25759.0,6.0,8.0,300.0,13.0,18.0,5440.0,133.0,222.0\n358,Honda,Pilot LX,SUV,Asia,All,27560.0,24843.0,3.5,6.0,240.0,17.0,22.0,4387.0,106.0,188.0\n359,Honda,Civic HX 
2dr,Sedan,Asia,Front,14170.0,12996.0,1.7,4.0,117.0,36.0,44.0,2500.0,103.0,175.0\n360,Honda,Civic EX 4dr,Sedan,Asia,Front,17750.0,16265.0,1.7,4.0,127.0,32.0,37.0,2601.0,103.0,175.0\n361,Honda,Odyssey LX,Sedan,Asia,Front,24950.0,22498.0,3.5,6.0,240.0,18.0,25.0,4310.0,118.0,201.0\n362,Hyundai,Santa Fe GLS,SUV,Asia,Front,21589.0,20201.0,2.7,6.0,173.0,20.0,26.0,3549.0,103.0,177.0\n363,Hyundai,Elantra GLS 4dr,Sedan,Asia,Front,13839.0,12781.0,2.0,4.0,138.0,26.0,34.0,2635.0,103.0,178.0\n364,Hyundai,Sonata LX 4dr,Sedan,Asia,Front,20339.0,18380.0,2.7,6.0,170.0,19.0,27.0,3217.0,106.0,187.0\n365,Infiniti,G35 4dr,Sedan,Asia,Rear,28495.0,26157.0,3.5,6.0,260.0,18.0,26.0,3336.0,112.0,187.0\n366,Infiniti,M45 4dr,Sedan,Asia,Rear,42845.0,38792.0,4.5,8.0,340.0,17.0,23.0,3851.0,110.0,197.0\n367,Isuzu,Ascender S,SUV,Asia,All,31849.0,29977.0,4.2,6.0,275.0,15.0,20.0,4967.0,129.0,208.0\n368,Jaguar,S-Type 3.0 4dr,Sedan,Europe,Rear,43895.0,40004.0,3.0,6.0,235.0,18.0,26.0,3777.0,115.0,192.0\n369,Jaguar,XJ8 4dr,Sedan,Europe,Rear,59995.0,54656.0,4.2,8.0,294.0,18.0,28.0,3803.0,119.0,200.0\n370,Jaguar,XKR coupe 2dr,Sports,Europe,Rear,81995.0,74676.0,4.2,8.0,390.0,16.0,23.0,3865.0,102.0,187.0\n371,Jeep,Wrangler Sahara convertible 2dr,SUV,USA,All,25520.0,23275.0,4.0,6.0,190.0,16.0,19.0,3575.0,93.0,150.0\n372,Kia,Rio 4dr auto,Sedan,Asia,Front,11155.0,10705.0,1.6,4.0,104.0,25.0,32.0,2458.0,95.0,167.0\n373,Kia,Optima LX V6 4dr,Sedan,Asia,Front,18435.0,16850.0,2.7,6.0,170.0,20.0,27.0,3279.0,106.0,186.0\n374,Land Rover,Range Rover HSE,SUV,Europe,All,72250.0,65807.0,4.4,8.0,282.0,12.0,16.0,5379.0,113.0,195.0\n375,Lexus,LX 470,SUV,Asia,All,64800.0,56455.0,4.7,8.0,235.0,13.0,17.0,5590.0,112.0,193.0\n376,Lexus,IS 300 4dr auto,Sedan,Asia,Rear,32415.0,28611.0,3.0,6.0,215.0,18.0,24.0,3285.0,105.0,177.0\n377,Lexus,SC 430 convertible 2dr,Sports,Asia,Rear,63200.0,55063.0,4.3,8.0,300.0,18.0,23.0,3840.0,103.0,178.0\n378,Lincoln,LS V6 Luxury 
4dr,Sedan,USA,Rear,32495.0,29969.0,3.0,6.0,232.0,20.0,26.0,3681.0,115.0,194.0\n379,Lincoln,Town Car Signature 4dr,Sedan,USA,Rear,41815.0,38418.0,4.6,8.0,239.0,17.0,25.0,4369.0,118.0,215.0\n380,MINI,Cooper S,Sedan,Europe,Front,19999.0,18137.0,1.6,4.0,163.0,25.0,34.0,2678.0,97.0,144.0\n381,Mazda,Mazda6 i 4dr,Sedan,Asia,Front,19270.0,17817.0,2.3,4.0,160.0,24.0,32.0,3042.0,105.0,187.0\n382,Mazda,RX-8 4dr automatic,Sports,Asia,Rear,25700.0,23794.0,1.3,,197.0,18.0,25.0,3053.0,106.0,174.0\n383,Mercedes-Benz,G500,SUV,Europe,All,76870.0,71540.0,5.0,8.0,292.0,13.0,14.0,5423.0,112.0,186.0\n384,Mercedes-Benz,C240 4dr,Sedan,Europe,Rear,32280.0,30071.0,2.6,6.0,168.0,20.0,25.0,3360.0,107.0,178.0\n385,Mercedes-Benz,C320 4dr,Sedan,Europe,All,38830.0,36162.0,3.2,6.0,215.0,19.0,27.0,3450.0,107.0,178.0\n386,Mercedes-Benz,CLK320 coupe 2dr (convertible),Sedan,Europe,Rear,45707.0,41966.0,3.2,6.0,215.0,20.0,26.0,3770.0,107.0,183.0\n387,Mercedes-Benz,S430 4dr,Sedan,Europe,Rear,74320.0,69168.0,4.3,8.0,275.0,18.0,26.0,4160.0,122.0,203.0\n388,Mercedes-Benz,SL600 convertible 2dr,Sports,Europe,Rear,126670.0,117854.0,5.5,12.0,493.0,13.0,19.0,4429.0,101.0,179.0\n389,Mercedes-Benz,E320,Wagon,Europe,Rear,50670.0,47174.0,3.2,6.0,221.0,19.0,27.0,3966.0,112.0,190.0\n390,Mercury,Grand Marquis GS 4dr,Sedan,USA,Rear,24695.0,23217.0,4.6,8.0,224.0,17.0,25.0,4052.0,115.0,212.0\n391,Mercury,Marauder 4dr,Sedan,USA,Rear,34495.0,31558.0,4.6,8.0,302.0,17.0,23.0,4195.0,115.0,212.0\n392,Mitsubishi,Montero XLS,SUV,Asia,All,33112.0,30763.0,3.8,6.0,215.0,15.0,19.0,4718.0,110.0,190.0\n393,Mitsubishi,Galant ES 2.4L 4dr,Sedan,Asia,Front,19312.0,17957.0,2.4,4.0,160.0,23.0,30.0,3351.0,108.0,191.0\n394,Mitsubishi,Eclipse GTS 2dr,Sports,Asia,Front,25092.0,23456.0,3.0,6.0,210.0,21.0,28.0,3241.0,101.0,177.0\n395,Nissan,Pathfinder Armada SE,SUV,Asia,Front,33840.0,30815.0,5.6,8.0,305.0,13.0,19.0,5013.0,123.0,207.0\n396,Nissan,Sentra 1.8 S 
4dr,Sedan,Asia,Front,14740.0,13747.0,1.8,4.0,126.0,28.0,35.0,2581.0,100.0,178.0\n397,Nissan,Maxima SE 4dr,Sedan,Asia,Front,27490.0,25182.0,3.5,6.0,265.0,20.0,28.0,3473.0,111.0,194.0\n398,Nissan,350Z coupe 2dr,Sports,Asia,Rear,26910.0,25203.0,3.5,6.0,287.0,20.0,26.0,3188.0,104.0,169.0\n399,Nissan,Murano SL,Wagon,Asia,Rear,28739.0,27300.0,3.5,6.0,245.0,20.0,25.0,3801.0,111.0,188.0\n400,Pontiac,Aztekt,SUV,USA,Front,21595.0,19810.0,3.4,6.0,185.0,19.0,26.0,3779.0,108.0,182.0\n401,Pontiac,Sunfire 1SC 2dr,Sedan,USA,Front,17735.0,16369.0,2.2,4.0,140.0,24.0,33.0,2771.0,104.0,182.0\n402,Pontiac,Montana EWB,Sedan,USA,All,31370.0,28454.0,3.4,6.0,185.0,18.0,24.0,4431.0,121.0,201.0\n403,Porsche,911 Carrera convertible 2dr (coupe),Sports,Europe,Rear,79165.0,69229.0,3.6,6.0,315.0,18.0,26.0,3135.0,93.0,175.0\n404,Porsche,Boxster convertible 2dr,Sports,Europe,Rear,43365.0,37886.0,2.7,6.0,228.0,20.0,29.0,2811.0,95.0,170.0\n405,Saab,9-5 Arc 4dr,Sedan,Europe,Front,35105.0,33011.0,2.3,4.0,220.0,21.0,29.0,3470.0,106.0,190.0\n406,Saab,9-5 Aero,Wagon,Europe,Front,40845.0,38376.0,2.3,4.0,250.0,19.0,29.0,3620.0,106.0,190.0\n407,Saturn,lon3 4dr,Sedan,USA,Front,15825.0,14811.0,2.2,4.0,140.0,26.0,35.0,2692.0,103.0,185.0\n408,Saturn,L300 2,Wagon,USA,Front,23560.0,21779.0,2.2,4.0,140.0,24.0,34.0,3109.0,107.0,190.0\n409,Subaru,Legacy L 4dr,Sedan,Asia,All,20445.0,18713.0,2.5,4.0,165.0,21.0,28.0,3285.0,104.0,184.0\n410,Subaru,Outback H-6 VDC 4dr,Sedan,Asia,All,31545.0,28603.0,3.0,6.0,212.0,19.0,26.0,3630.0,104.0,184.0\n411,Subaru,Forester X,Wagon,Asia,All,21445.0,19646.0,2.5,4.0,165.0,21.0,28.0,3090.0,99.0,175.0\n412,Suzuki,Aeno S 4dr,Sedan,Asia,Front,12884.0,12719.0,2.3,4.0,155.0,25.0,31.0,2676.0,98.0,171.0\n413,Suzuki,Verona LX 4dr,Sedan,Asia,Front,17262.0,17053.0,2.5,6.0,155.0,20.0,27.0,3380.0,106.0,188.0\n414,Toyota,4Runner SR5 V6,SUV,Asia,Front,27710.0,24801.0,4.0,6.0,245.0,18.0,21.0,4035.0,110.0,189.0\n415,Toyota,Corolla CE 
4dr,Sedan,Asia,Front,14085.0,13065.0,1.8,4.0,130.0,32.0,40.0,2502.0,102.0,178.0\n416,Toyota,Echo 2dr auto,Sedan,Asia,Front,11560.0,10896.0,1.5,4.0,108.0,33.0,39.0,2085.0,93.0,163.0\n417,Toyota,Camry Solara SE 2dr,Sedan,Asia,Front,19635.0,17722.0,2.4,4.0,157.0,24.0,33.0,3175.0,107.0,193.0\n418,Toyota,Camry Solara SLE V6 2dr,Sedan,Asia,Front,26510.0,23908.0,3.3,6.0,225.0,20.0,29.0,3439.0,107.0,193.0\n419,Toyota,Celica GT-S 2dr,Sports,Asia,Front,22570.0,20363.0,1.8,4.0,180.0,24.0,33.0,2500.0,102.0,171.0\n420,Toyota,Tundra Access Cab V6 SR5,Truck,Asia,All,25935.0,23520.0,3.4,6.0,190.0,14.0,17.0,4435.0,128.0,218.0\n421,Volkswagen,GTI 1.8T 2dr hatch,Sedan,Europe,Front,19825.0,18109.0,1.8,4.0,180.0,24.0,31.0,2934.0,99.0,168.0\n422,Volkswagen,New Beetle GLS convertible 2dr,Sedan,Europe,Front,23215.0,21689.0,2.0,4.0,115.0,24.0,30.0,3082.0,99.0,161.0\n423,Volkswagen,Phaeton 4dr,Sedan,Europe,Front,65000.0,59912.0,4.2,8.0,335.0,16.0,22.0,5194.0,118.0,204.0\n424,Volkswagen,Passat W8,Wagon,Europe,Front,40235.0,36956.0,4.0,8.0,270.0,18.0,25.0,4067.0,106.0,184.0\n425,Volvo,S60 T5 4dr,Sedan,Europe,Front,34845.0,32902.0,2.3,5.0,247.0,20.0,28.0,3766.0,107.0,180.0\n426,Volvo,C70 LPT convertible 2dr,Sedan,Europe,Front,40565.0,38203.0,2.4,5.0,197.0,21.0,28.0,3450.0,105.0,186.0\n427,Volvo,XC70,Wagon,Europe,All,35145.0,33112.0,2.5,5.0,208.0,20.0,27.0,3823.0,109.0,186.0\n\n" ], [ "print(tbl.to_html())", "<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Make</th>\n <th>Model</th>\n <th>Type</th>\n <th>Origin</th>\n <th>DriveTrain</th>\n <th>MSRP</th>\n <th>Invoice</th>\n <th>EngineSize</th>\n <th>Cylinders</th>\n <th>Horsepower</th>\n <th>MPG_City</th>\n <th>MPG_Highway</th>\n <th>Weight</th>\n <th>Wheelbase</th>\n <th>Length</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>0</th>\n <td>Acura</td>\n <td>MDX</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>36945.0</td>\n <td>33337.0</td>\n <td>3.5</td>\n <td>6.0</td>\n 
<td>265.0</td>\n <td>17.0</td>\n <td>23.0</td>\n <td>4451.0</td>\n <td>106.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>1</th>\n <td>Acura</td>\n <td>3.5 RL 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>43755.0</td>\n <td>39014.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>3880.0</td>\n <td>115.0</td>\n <td>197.0</td>\n </tr>\n <tr>\n <th>2</th>\n <td>Audi</td>\n <td>A41.8T convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>35940.0</td>\n <td>32506.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>170.0</td>\n <td>23.0</td>\n <td>30.0</td>\n <td>3638.0</td>\n <td>105.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>3</th>\n <td>Audi</td>\n <td>A6 3.0 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>36640.0</td>\n <td>33129.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3561.0</td>\n <td>109.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>4</th>\n <td>Audi</td>\n <td>A6 2.7 Turbo Quattro 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>42840.0</td>\n <td>38840.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>250.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3836.0</td>\n <td>109.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>5</th>\n <td>Audi</td>\n <td>RS 6 4dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Front</td>\n <td>84600.0</td>\n <td>76417.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>450.0</td>\n <td>15.0</td>\n <td>22.0</td>\n <td>4024.0</td>\n <td>109.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>6</th>\n <td>Audi</td>\n <td>A6 3.0 Avant Quattro</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>All</td>\n <td>40840.0</td>\n <td>37060.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4035.0</td>\n <td>109.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>7</th>\n <td>BMW</td>\n <td>325i 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>28495.0</td>\n 
<td>26155.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>184.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3219.0</td>\n <td>107.0</td>\n <td>176.0</td>\n </tr>\n <tr>\n <th>8</th>\n <td>BMW</td>\n <td>330i 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>35495.0</td>\n <td>32525.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3285.0</td>\n <td>107.0</td>\n <td>176.0</td>\n </tr>\n <tr>\n <th>9</th>\n <td>BMW</td>\n <td>330Ci convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>44295.0</td>\n <td>40530.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>19.0</td>\n <td>28.0</td>\n <td>3616.0</td>\n <td>107.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>10</th>\n <td>BMW</td>\n <td>745Li 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>73195.0</td>\n <td>66830.0</td>\n <td>4.4</td>\n <td>8.0</td>\n <td>325.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>4464.0</td>\n <td>123.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>11</th>\n <td>BMW</td>\n <td>Z4 convertible 3.0i 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>41045.0</td>\n <td>37575.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>2998.0</td>\n <td>98.0</td>\n <td>161.0</td>\n </tr>\n <tr>\n <th>12</th>\n <td>Buick</td>\n <td>Century Custom 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>22180.0</td>\n <td>20351.0</td>\n <td>3.1</td>\n <td>6.0</td>\n <td>175.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3353.0</td>\n <td>109.0</td>\n <td>195.0</td>\n </tr>\n <tr>\n <th>13</th>\n <td>Buick</td>\n <td>LeSabre Limited 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>32245.0</td>\n <td>29566.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>205.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3591.0</td>\n <td>112.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>14</th>\n <td>Cadillac</td>\n <td>SRX V8</td>\n <td>SUV</td>\n <td>USA</td>\n 
<td>Front</td>\n <td>46995.0</td>\n <td>43523.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>320.0</td>\n <td>16.0</td>\n <td>21.0</td>\n <td>4302.0</td>\n <td>116.0</td>\n <td>195.0</td>\n </tr>\n <tr>\n <th>15</th>\n <td>Cadillac</td>\n <td>Seville SLS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>47955.0</td>\n <td>43841.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>275.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3992.0</td>\n <td>112.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>16</th>\n <td>Chevrolet</td>\n <td>Tahoe LT</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>41465.0</td>\n <td>36287.0</td>\n <td>5.3</td>\n <td>8.0</td>\n <td>295.0</td>\n <td>14.0</td>\n <td>18.0</td>\n <td>5050.0</td>\n <td>116.0</td>\n <td>197.0</td>\n </tr>\n <tr>\n <th>17</th>\n <td>Chevrolet</td>\n <td>Aveo LS 4dr hatch</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>12585.0</td>\n <td>11802.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>103.0</td>\n <td>28.0</td>\n <td>34.0</td>\n <td>2348.0</td>\n <td>98.0</td>\n <td>153.0</td>\n </tr>\n <tr>\n <th>18</th>\n <td>Chevrolet</td>\n <td>Impala 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>21900.0</td>\n <td>20095.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>180.0</td>\n <td>21.0</td>\n <td>32.0</td>\n <td>3465.0</td>\n <td>111.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>19</th>\n <td>Chevrolet</td>\n <td>Impala LS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>25000.0</td>\n <td>22931.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3476.0</td>\n <td>111.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>20</th>\n <td>Chevrolet</td>\n <td>Astro</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>All</td>\n <td>26395.0</td>\n <td>23954.0</td>\n <td>4.3</td>\n <td>6.0</td>\n <td>190.0</td>\n <td>14.0</td>\n <td>17.0</td>\n <td>4605.0</td>\n <td>111.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>21</th>\n <td>Chevrolet</td>\n <td>Avalanche 
1500</td>\n <td>Truck</td>\n <td>USA</td>\n <td>All</td>\n <td>36100.0</td>\n <td>31689.0</td>\n <td>5.3</td>\n <td>8.0</td>\n <td>295.0</td>\n <td>14.0</td>\n <td>18.0</td>\n <td>5678.0</td>\n <td>130.0</td>\n <td>222.0</td>\n </tr>\n <tr>\n <th>22</th>\n <td>Chevrolet</td>\n <td>SSR</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>41995.0</td>\n <td>39306.0</td>\n <td>5.3</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>16.0</td>\n <td>19.0</td>\n <td>4760.0</td>\n <td>116.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>23</th>\n <td>Chrysler</td>\n <td>Sebring 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>19090.0</td>\n <td>17805.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>22.0</td>\n <td>30.0</td>\n <td>3173.0</td>\n <td>108.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>24</th>\n <td>Chrysler</td>\n <td>Concorde LXi 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>26860.0</td>\n <td>24909.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>232.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3548.0</td>\n <td>113.0</td>\n <td>208.0</td>\n </tr>\n <tr>\n <th>25</th>\n <td>Chrysler</td>\n <td>Sebring Limited convertible 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>30950.0</td>\n <td>28613.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3448.0</td>\n <td>106.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>26</th>\n <td>Chrysler</td>\n <td>Pacifica</td>\n <td>Wagon</td>\n <td>USA</td>\n <td>Rear</td>\n <td>31230.0</td>\n <td>28725.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>250.0</td>\n <td>17.0</td>\n <td>23.0</td>\n <td>4675.0</td>\n <td>116.0</td>\n <td>199.0</td>\n </tr>\n <tr>\n <th>27</th>\n <td>Dodge</td>\n <td>Intrepid SE 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>22035.0</td>\n <td>20502.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3469.0</td>\n <td>113.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n 
<th>28</th>\n <td>Dodge</td>\n <td>Caravan SE</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>21795.0</td>\n <td>20508.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3862.0</td>\n <td>113.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>29</th>\n <td>Dodge</td>\n <td>Dakota Club Cab</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>20300.0</td>\n <td>18670.0</td>\n <td>3.7</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>16.0</td>\n <td>22.0</td>\n <td>3829.0</td>\n <td>131.0</td>\n <td>219.0</td>\n </tr>\n <tr>\n <th>30</th>\n <td>Ford</td>\n <td>Explorer XLT V6</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>29670.0</td>\n <td>26983.0</td>\n <td>4.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>15.0</td>\n <td>20.0</td>\n <td>4463.0</td>\n <td>114.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>31</th>\n <td>Ford</td>\n <td>Focus SE 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>15460.0</td>\n <td>14496.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>26.0</td>\n <td>33.0</td>\n <td>2606.0</td>\n <td>103.0</td>\n <td>168.0</td>\n </tr>\n <tr>\n <th>32</th>\n <td>Ford</td>\n <td>Taurus SES Duratec 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>22735.0</td>\n <td>20857.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>201.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3313.0</td>\n <td>109.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>33</th>\n <td>Ford</td>\n <td>Freestar SE</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>26930.0</td>\n <td>24498.0</td>\n <td>3.9</td>\n <td>6.0</td>\n <td>193.0</td>\n <td>17.0</td>\n <td>23.0</td>\n <td>4275.0</td>\n <td>121.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>34</th>\n <td>Ford</td>\n <td>F-150 Regular Cab XL</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>22010.0</td>\n <td>19490.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>231.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>4788.0</td>\n <td>126.0</td>\n 
<td>211.0</td>\n </tr>\n <tr>\n <th>35</th>\n <td>Ford</td>\n <td>Taurus SE</td>\n <td>Wagon</td>\n <td>USA</td>\n <td>Front</td>\n <td>22290.0</td>\n <td>20457.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>155.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3497.0</td>\n <td>109.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>36</th>\n <td>GMC</td>\n <td>Safari SLE</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>25640.0</td>\n <td>23215.0</td>\n <td>4.3</td>\n <td>6.0</td>\n <td>190.0</td>\n <td>16.0</td>\n <td>20.0</td>\n <td>4309.0</td>\n <td>111.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>37</th>\n <td>GMC</td>\n <td>Sonoma Crew Cab</td>\n <td>Truck</td>\n <td>USA</td>\n <td>All</td>\n <td>25395.0</td>\n <td>23043.0</td>\n <td>4.3</td>\n <td>6.0</td>\n <td>190.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>4083.0</td>\n <td>123.0</td>\n <td>208.0</td>\n </tr>\n <tr>\n <th>38</th>\n <td>Honda</td>\n <td>CR-V LX</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>19860.0</td>\n <td>18419.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>21.0</td>\n <td>25.0</td>\n <td>3258.0</td>\n <td>103.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>39</th>\n <td>Honda</td>\n <td>Civic LX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>15850.0</td>\n <td>14531.0</td>\n <td>1.7</td>\n <td>4.0</td>\n <td>115.0</td>\n <td>32.0</td>\n <td>38.0</td>\n <td>2513.0</td>\n <td>103.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>40</th>\n <td>Honda</td>\n <td>Civic Si 2dr hatch</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19490.0</td>\n <td>17849.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>26.0</td>\n <td>30.0</td>\n <td>2782.0</td>\n <td>101.0</td>\n <td>166.0</td>\n </tr>\n <tr>\n <th>41</th>\n <td>Honda</td>\n <td>Odyssey EX</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>27450.0</td>\n <td>24744.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4365.0</td>\n 
<td>118.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>42</th>\n <td>Hyundai</td>\n <td>Accent 2dr hatch</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>10539.0</td>\n <td>10107.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>103.0</td>\n <td>29.0</td>\n <td>33.0</td>\n <td>2255.0</td>\n <td>96.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>43</th>\n <td>Hyundai</td>\n <td>Elantra GT 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>15389.0</td>\n <td>14207.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>138.0</td>\n <td>26.0</td>\n <td>34.0</td>\n <td>2635.0</td>\n <td>103.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>44</th>\n <td>Hyundai</td>\n <td>XG350 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>24589.0</td>\n <td>22055.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>194.0</td>\n <td>17.0</td>\n <td>26.0</td>\n <td>3651.0</td>\n <td>108.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>45</th>\n <td>Infiniti</td>\n <td>G35 Sport Coupe 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>29795.0</td>\n <td>27536.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>280.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3416.0</td>\n <td>112.0</td>\n <td>182.0</td>\n </tr>\n <tr>\n <th>46</th>\n <td>Infiniti</td>\n <td>Q45 Luxury 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>52545.0</td>\n <td>47575.0</td>\n <td>4.5</td>\n <td>8.0</td>\n <td>340.0</td>\n <td>17.0</td>\n <td>23.0</td>\n <td>3977.0</td>\n <td>113.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>47</th>\n <td>Isuzu</td>\n <td>Rodeo S</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>20449.0</td>\n <td>19261.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>193.0</td>\n <td>17.0</td>\n <td>21.0</td>\n <td>3836.0</td>\n <td>106.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>48</th>\n <td>Jaguar</td>\n <td>S-Type 4.2 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>49995.0</td>\n <td>45556.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>294.0</td>\n 
<td>18.0</td>\n <td>28.0</td>\n <td>3874.0</td>\n <td>115.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>49</th>\n <td>Jaguar</td>\n <td>XJR 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>74995.0</td>\n <td>68306.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>390.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>3948.0</td>\n <td>119.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>50</th>\n <td>Jaguar</td>\n <td>XKR convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>86995.0</td>\n <td>79226.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>390.0</td>\n <td>16.0</td>\n <td>23.0</td>\n <td>4042.0</td>\n <td>102.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>51</th>\n <td>Kia</td>\n <td>Sorento LX</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19635.0</td>\n <td>18630.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>192.0</td>\n <td>16.0</td>\n <td>19.0</td>\n <td>4112.0</td>\n <td>107.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>52</th>\n <td>Kia</td>\n <td>Spectra 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>12360.0</td>\n <td>11630.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>124.0</td>\n <td>24.0</td>\n <td>32.0</td>\n <td>2661.0</td>\n <td>101.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>53</th>\n <td>Kia</td>\n <td>Amanti 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>26000.0</td>\n <td>23764.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>195.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4021.0</td>\n <td>110.0</td>\n <td>196.0</td>\n </tr>\n <tr>\n <th>54</th>\n <td>Land Rover</td>\n <td>Discovery SE</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>39250.0</td>\n <td>35777.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>217.0</td>\n <td>12.0</td>\n <td>16.0</td>\n <td>4576.0</td>\n <td>100.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>55</th>\n <td>Lexus</td>\n <td>RX 330</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>39195.0</td>\n <td>34576.0</td>\n <td>3.3</td>\n <td>6.0</td>\n 
<td>230.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>4065.0</td>\n <td>107.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>56</th>\n <td>Lexus</td>\n <td>GS 300 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>41010.0</td>\n <td>36196.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3649.0</td>\n <td>110.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>57</th>\n <td>Lexus</td>\n <td>IS 300 SportCross</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>32455.0</td>\n <td>28647.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>3410.0</td>\n <td>105.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>58</th>\n <td>Lincoln</td>\n <td>LS V6 Premium 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>36895.0</td>\n <td>33929.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>232.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3681.0</td>\n <td>115.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>59</th>\n <td>Lincoln</td>\n <td>Town Car Ultimate 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>44925.0</td>\n <td>41217.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>239.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4369.0</td>\n <td>118.0</td>\n <td>215.0</td>\n </tr>\n <tr>\n <th>60</th>\n <td>Mazda</td>\n <td>Tribute DX 2.0</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>21087.0</td>\n <td>19742.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>22.0</td>\n <td>25.0</td>\n <td>3091.0</td>\n <td>103.0</td>\n <td>173.0</td>\n </tr>\n <tr>\n <th>61</th>\n <td>Mazda</td>\n <td>MPV ES</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>28750.0</td>\n <td>26600.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3812.0</td>\n <td>112.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>62</th>\n <td>Mazda</td>\n <td>RX-8 4dr manual</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>27200.0</td>\n 
<td>25179.0</td>\n <td>1.3</td>\n <td>NaN</td>\n <td>238.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>3029.0</td>\n <td>106.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>63</th>\n <td>Mercedes-Benz</td>\n <td>ML500</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>46470.0</td>\n <td>43268.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>288.0</td>\n <td>14.0</td>\n <td>17.0</td>\n <td>4874.0</td>\n <td>111.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>64</th>\n <td>Mercedes-Benz</td>\n <td>C240 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>33480.0</td>\n <td>31187.0</td>\n <td>2.6</td>\n <td>6.0</td>\n <td>168.0</td>\n <td>19.0</td>\n <td>25.0</td>\n <td>3360.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>65</th>\n <td>Mercedes-Benz</td>\n <td>C32 AMG 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>52120.0</td>\n <td>48522.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>349.0</td>\n <td>16.0</td>\n <td>21.0</td>\n <td>3540.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>66</th>\n <td>Mercedes-Benz</td>\n <td>CLK500 coupe 2dr (convertible)</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>52800.0</td>\n <td>49104.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>17.0</td>\n <td>22.0</td>\n <td>3585.0</td>\n <td>107.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>67</th>\n <td>Mercedes-Benz</td>\n <td>S500 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>86970.0</td>\n <td>80939.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>16.0</td>\n <td>24.0</td>\n <td>4390.0</td>\n <td>122.0</td>\n <td>203.0</td>\n </tr>\n <tr>\n <th>68</th>\n <td>Mercedes-Benz</td>\n <td>SLK230 convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>40320.0</td>\n <td>37548.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>192.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3055.0</td>\n <td>95.0</td>\n <td>158.0</td>\n </tr>\n <tr>\n <th>69</th>\n <td>Mercedes-Benz</td>\n 
<td>E500</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>All</td>\n <td>60670.0</td>\n <td>56474.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>16.0</td>\n <td>24.0</td>\n <td>4230.0</td>\n <td>112.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>70</th>\n <td>Mercury</td>\n <td>Grand Marquis LS Premium 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>29595.0</td>\n <td>27148.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>224.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4052.0</td>\n <td>115.0</td>\n <td>212.0</td>\n </tr>\n <tr>\n <th>71</th>\n <td>Mercury</td>\n <td>Monterey Luxury</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>33995.0</td>\n <td>30846.0</td>\n <td>4.2</td>\n <td>6.0</td>\n <td>201.0</td>\n <td>16.0</td>\n <td>23.0</td>\n <td>4340.0</td>\n <td>121.0</td>\n <td>202.0</td>\n </tr>\n <tr>\n <th>72</th>\n <td>Mitsubishi</td>\n <td>Outlander LS</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>18892.0</td>\n <td>17569.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>21.0</td>\n <td>27.0</td>\n <td>3240.0</td>\n <td>103.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>73</th>\n <td>Mitsubishi</td>\n <td>Lancer OZ Rally 4dr auto</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>17232.0</td>\n <td>16196.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>120.0</td>\n <td>25.0</td>\n <td>31.0</td>\n <td>2744.0</td>\n <td>102.0</td>\n <td>181.0</td>\n </tr>\n <tr>\n <th>74</th>\n <td>Mitsubishi</td>\n <td>Eclipse Spyder GT convertible 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Front</td>\n <td>26992.0</td>\n <td>25218.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3296.0</td>\n <td>101.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>75</th>\n <td>Nissan</td>\n <td>Pathfinder SE</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>27339.0</td>\n <td>25972.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>16.0</td>\n <td>21.0</td>\n 
<td>3871.0</td>\n <td>106.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>76</th>\n <td>Nissan</td>\n <td>Altima S 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19240.0</td>\n <td>18030.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>175.0</td>\n <td>21.0</td>\n <td>26.0</td>\n <td>3039.0</td>\n <td>110.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>77</th>\n <td>Nissan</td>\n <td>Maxima SL 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>29440.0</td>\n <td>26966.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>265.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3476.0</td>\n <td>111.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>78</th>\n <td>Nissan</td>\n <td>350Z Enthusiast convertible 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>34390.0</td>\n <td>31845.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>287.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3428.0</td>\n <td>104.0</td>\n <td>169.0</td>\n </tr>\n <tr>\n <th>79</th>\n <td>Oldsmobile</td>\n <td>Alero GX 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>18825.0</td>\n <td>17642.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>24.0</td>\n <td>32.0</td>\n <td>2946.0</td>\n <td>107.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>80</th>\n <td>Pontiac</td>\n <td>Sunfire 1SA 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>15495.0</td>\n <td>14375.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>24.0</td>\n <td>33.0</td>\n <td>2771.0</td>\n <td>104.0</td>\n <td>182.0</td>\n </tr>\n <tr>\n <th>81</th>\n <td>Pontiac</td>\n <td>Grand Prix GT2 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>24295.0</td>\n <td>22284.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3484.0</td>\n <td>111.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>82</th>\n <td>Pontiac</td>\n <td>GTO 2dr</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>33500.0</td>\n <td>30710.0</td>\n <td>5.7</td>\n 
<td>8.0</td>\n <td>340.0</td>\n <td>16.0</td>\n <td>20.0</td>\n <td>3725.0</td>\n <td>110.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>83</th>\n <td>Porsche</td>\n <td>911 Carrera 4S coupe 2dr (convert)</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>All</td>\n <td>84165.0</td>\n <td>72206.0</td>\n <td>3.6</td>\n <td>6.0</td>\n <td>315.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>3240.0</td>\n <td>93.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>84</th>\n <td>Porsche</td>\n <td>Boxster S convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>52365.0</td>\n <td>45766.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>258.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>2911.0</td>\n <td>95.0</td>\n <td>170.0</td>\n </tr>\n <tr>\n <th>85</th>\n <td>Saab</td>\n <td>9-5 Aero 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>39465.0</td>\n <td>37721.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>250.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3470.0</td>\n <td>106.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>86</th>\n <td>Saturn</td>\n <td>VUE</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>20585.0</td>\n <td>19238.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>143.0</td>\n <td>21.0</td>\n <td>26.0</td>\n <td>3381.0</td>\n <td>107.0</td>\n <td>181.0</td>\n </tr>\n <tr>\n <th>87</th>\n <td>Saturn</td>\n <td>lon2 quad coupe 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>14850.0</td>\n <td>13904.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>35.0</td>\n <td>2751.0</td>\n <td>103.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>88</th>\n <td>Scion</td>\n <td>xA 4dr hatch</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>12965.0</td>\n <td>12340.0</td>\n <td>1.5</td>\n <td>4.0</td>\n <td>108.0</td>\n <td>32.0</td>\n <td>38.0</td>\n <td>2340.0</td>\n <td>93.0</td>\n <td>154.0</td>\n </tr>\n <tr>\n <th>89</th>\n <td>Subaru</td>\n <td>Legacy GT 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n 
<td>All</td>\n <td>25645.0</td>\n <td>23336.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3395.0</td>\n <td>104.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>90</th>\n <td>Subaru</td>\n <td>Impreza WRX 4dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>All</td>\n <td>25045.0</td>\n <td>23022.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>227.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3085.0</td>\n <td>99.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>91</th>\n <td>Subaru</td>\n <td>Outback</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>All</td>\n <td>23895.0</td>\n <td>21773.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3430.0</td>\n <td>104.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>92</th>\n <td>Suzuki</td>\n <td>Aerio LX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>14500.0</td>\n <td>14317.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>155.0</td>\n <td>25.0</td>\n <td>31.0</td>\n <td>2676.0</td>\n <td>98.0</td>\n <td>171.0</td>\n </tr>\n <tr>\n <th>93</th>\n <td>Suzuki</td>\n <td>Aerio SX</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>All</td>\n <td>16497.0</td>\n <td>16291.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>155.0</td>\n <td>24.0</td>\n <td>29.0</td>\n <td>2932.0</td>\n <td>98.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>94</th>\n <td>Toyota</td>\n <td>Highlander V6</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>27930.0</td>\n <td>24915.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>230.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>3935.0</td>\n <td>107.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>95</th>\n <td>Toyota</td>\n <td>Corolla S 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>15030.0</td>\n <td>13650.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>32.0</td>\n <td>40.0</td>\n <td>2524.0</td>\n <td>102.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>96</th>\n <td>Toyota</td>\n <td>Echo 4dr</td>\n <td>Sedan</td>\n 
<td>Asia</td>\n <td>Front</td>\n <td>11290.0</td>\n <td>10642.0</td>\n <td>1.5</td>\n <td>4.0</td>\n <td>108.0</td>\n <td>35.0</td>\n <td>43.0</td>\n <td>2055.0</td>\n <td>93.0</td>\n <td>163.0</td>\n </tr>\n <tr>\n <th>97</th>\n <td>Toyota</td>\n <td>Camry Solara SE V6 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>21965.0</td>\n <td>19819.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3417.0</td>\n <td>107.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>98</th>\n <td>Toyota</td>\n <td>Avalon XLS 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>30920.0</td>\n <td>27271.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3439.0</td>\n <td>107.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>99</th>\n <td>Toyota</td>\n <td>MR2 Spyder convertible 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>25130.0</td>\n <td>22787.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>138.0</td>\n <td>26.0</td>\n <td>32.0</td>\n <td>2195.0</td>\n <td>97.0</td>\n <td>153.0</td>\n </tr>\n <tr>\n <th>100</th>\n <td>Toyota</td>\n <td>Matrix XR</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>Front</td>\n <td>16695.0</td>\n <td>15156.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>29.0</td>\n <td>36.0</td>\n <td>2679.0</td>\n <td>102.0</td>\n <td>171.0</td>\n </tr>\n <tr>\n <th>101</th>\n <td>Volkswagen</td>\n <td>Jetta GLS TDI 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>21055.0</td>\n <td>19638.0</td>\n <td>1.9</td>\n <td>4.0</td>\n <td>100.0</td>\n <td>38.0</td>\n <td>46.0</td>\n <td>3003.0</td>\n <td>99.0</td>\n <td>172.0</td>\n </tr>\n <tr>\n <th>102</th>\n <td>Volkswagen</td>\n <td>Passat GLS 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>23955.0</td>\n <td>21898.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>170.0</td>\n <td>22.0</td>\n <td>31.0</td>\n <td>3241.0</td>\n <td>106.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n 
<th>103</th>\n <td>Volkswagen</td>\n <td>Phaeton W12 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>75000.0</td>\n <td>69130.0</td>\n <td>6.0</td>\n <td>12.0</td>\n <td>420.0</td>\n <td>12.0</td>\n <td>19.0</td>\n <td>5399.0</td>\n <td>118.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>104</th>\n <td>Volvo</td>\n <td>XC90 T6</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>41250.0</td>\n <td>38851.0</td>\n <td>2.9</td>\n <td>6.0</td>\n <td>268.0</td>\n <td>15.0</td>\n <td>20.0</td>\n <td>4638.0</td>\n <td>113.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>105</th>\n <td>Volvo</td>\n <td>S60 R 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>37560.0</td>\n <td>35382.0</td>\n <td>2.5</td>\n <td>5.0</td>\n <td>300.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3571.0</td>\n <td>107.0</td>\n <td>181.0</td>\n </tr>\n <tr>\n <th>106</th>\n <td>Volvo</td>\n <td>C70 HPT convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>42565.0</td>\n <td>40083.0</td>\n <td>2.3</td>\n <td>5.0</td>\n <td>242.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3450.0</td>\n <td>105.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>107</th>\n <td>Acura</td>\n <td>RSX Type S 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>23820.0</td>\n <td>21761.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>200.0</td>\n <td>24.0</td>\n <td>31.0</td>\n <td>2778.0</td>\n <td>101.0</td>\n <td>172.0</td>\n </tr>\n <tr>\n <th>108</th>\n <td>Acura</td>\n <td>3.5 RL w/Navigation 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>46100.0</td>\n <td>41100.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>3893.0</td>\n <td>115.0</td>\n <td>197.0</td>\n </tr>\n <tr>\n <th>109</th>\n <td>Audi</td>\n <td>A4 3.0 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>31840.0</td>\n <td>28846.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>20.0</td>\n <td>28.0</td>\n 
<td>3462.0</td>\n <td>104.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>110</th>\n <td>Audi</td>\n <td>A6 3.0 Quattro 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>39640.0</td>\n <td>35992.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3880.0</td>\n <td>109.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>111</th>\n <td>Audi</td>\n <td>A6 4.2 Quattro 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>49690.0</td>\n <td>44936.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>4024.0</td>\n <td>109.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>112</th>\n <td>Audi</td>\n <td>TT 1.8 convertible 2dr (coupe)</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Front</td>\n <td>35940.0</td>\n <td>32512.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>180.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3131.0</td>\n <td>95.0</td>\n <td>159.0</td>\n </tr>\n <tr>\n <th>113</th>\n <td>Audi</td>\n <td>S4 Avant Quattro</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>All</td>\n <td>49090.0</td>\n <td>44446.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>340.0</td>\n <td>15.0</td>\n <td>21.0</td>\n <td>3936.0</td>\n <td>104.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>114</th>\n <td>BMW</td>\n <td>325Ci 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>30795.0</td>\n <td>28245.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>184.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3197.0</td>\n <td>107.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>115</th>\n <td>BMW</td>\n <td>330Ci 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>36995.0</td>\n <td>33890.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3285.0</td>\n <td>107.0</td>\n <td>176.0</td>\n </tr>\n <tr>\n <th>116</th>\n <td>BMW</td>\n <td>530i 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>44995.0</td>\n <td>41170.0</td>\n <td>3.0</td>\n <td>6.0</td>\n 
<td>225.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3472.0</td>\n <td>114.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>117</th>\n <td>BMW</td>\n <td>M3 coupe 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>48195.0</td>\n <td>44170.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>333.0</td>\n <td>16.0</td>\n <td>24.0</td>\n <td>3415.0</td>\n <td>108.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>118</th>\n <td>BMW</td>\n <td>325xi Sport</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>All</td>\n <td>32845.0</td>\n <td>30110.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>184.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3594.0</td>\n <td>107.0</td>\n <td>176.0</td>\n </tr>\n <tr>\n <th>119</th>\n <td>Buick</td>\n <td>LeSabre Custom 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>26470.0</td>\n <td>24282.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>205.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3567.0</td>\n <td>112.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>120</th>\n <td>Buick</td>\n <td>Park Avenue 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>35545.0</td>\n <td>32244.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>205.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3778.0</td>\n <td>114.0</td>\n <td>207.0</td>\n </tr>\n <tr>\n <th>121</th>\n <td>Cadillac</td>\n <td>CTS VVT 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>30835.0</td>\n <td>28575.0</td>\n <td>3.6</td>\n <td>6.0</td>\n <td>255.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3694.0</td>\n <td>113.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>122</th>\n <td>Cadillac</td>\n <td>XLR convertible 2dr</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>76200.0</td>\n <td>70546.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>320.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>3647.0</td>\n <td>106.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>123</th>\n <td>Chevrolet</td>\n <td>TrailBlazer LT</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>30295.0</td>\n 
<td>27479.0</td>\n <td>4.2</td>\n <td>6.0</td>\n <td>275.0</td>\n <td>16.0</td>\n <td>21.0</td>\n <td>4425.0</td>\n <td>113.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>124</th>\n <td>Chevrolet</td>\n <td>Cavalier 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>14610.0</td>\n <td>13697.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>37.0</td>\n <td>2617.0</td>\n <td>104.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>125</th>\n <td>Chevrolet</td>\n <td>Malibu 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>18995.0</td>\n <td>17434.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>145.0</td>\n <td>24.0</td>\n <td>34.0</td>\n <td>3174.0</td>\n <td>106.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>126</th>\n <td>Chevrolet</td>\n <td>Impala SS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>27995.0</td>\n <td>25672.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>18.0</td>\n <td>28.0</td>\n <td>3606.0</td>\n <td>111.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>127</th>\n <td>Chevrolet</td>\n <td>Venture LS</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>27020.0</td>\n <td>24518.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>185.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3699.0</td>\n <td>112.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>128</th>\n <td>Chevrolet</td>\n <td>Colorado Z85</td>\n <td>Truck</td>\n <td>USA</td>\n <td>All</td>\n <td>18760.0</td>\n <td>17070.0</td>\n <td>2.8</td>\n <td>4.0</td>\n <td>175.0</td>\n <td>18.0</td>\n <td>23.0</td>\n <td>3623.0</td>\n <td>111.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>129</th>\n <td>Chevrolet</td>\n <td>Malibu Maxx LS</td>\n <td>Wagon</td>\n <td>USA</td>\n <td>Front</td>\n <td>22225.0</td>\n <td>20394.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>22.0</td>\n <td>30.0</td>\n <td>3458.0</td>\n <td>112.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>130</th>\n <td>Chrysler</td>\n <td>Sebring Touring 4dr</td>\n 
<td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>21840.0</td>\n <td>20284.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3222.0</td>\n <td>108.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>131</th>\n <td>Chrysler</td>\n <td>PT Cruiser GT 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>25955.0</td>\n <td>24172.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>220.0</td>\n <td>21.0</td>\n <td>27.0</td>\n <td>3217.0</td>\n <td>103.0</td>\n <td>169.0</td>\n </tr>\n <tr>\n <th>132</th>\n <td>Chrysler</td>\n <td>Town and Country LX</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>27490.0</td>\n <td>25371.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>180.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>4068.0</td>\n <td>119.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>133</th>\n <td>Dodge</td>\n <td>Durango SLT</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>32235.0</td>\n <td>29472.0</td>\n <td>4.7</td>\n <td>8.0</td>\n <td>230.0</td>\n <td>15.0</td>\n <td>21.0</td>\n <td>4987.0</td>\n <td>119.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>134</th>\n <td>Dodge</td>\n <td>Stratus SXT 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>18820.0</td>\n <td>17512.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3182.0</td>\n <td>108.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>135</th>\n <td>Dodge</td>\n <td>Grand Caravan SXT</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>All</td>\n <td>32660.0</td>\n <td>29812.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4440.0</td>\n <td>119.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>136</th>\n <td>Dodge</td>\n <td>Ram 1500 Regular Cab ST</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>20215.0</td>\n <td>18076.0</td>\n <td>3.7</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>16.0</td>\n <td>21.0</td>\n <td>4542.0</td>\n <td>121.0</td>\n <td>208.0</td>\n </tr>\n <tr>\n 
<th>137</th>\n <td>Ford</td>\n <td>Escape XLS</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>22515.0</td>\n <td>20907.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>201.0</td>\n <td>18.0</td>\n <td>23.0</td>\n <td>3346.0</td>\n <td>103.0</td>\n <td>173.0</td>\n </tr>\n <tr>\n <th>138</th>\n <td>Ford</td>\n <td>Focus ZX5 5dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>15580.0</td>\n <td>14607.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>26.0</td>\n <td>33.0</td>\n <td>2691.0</td>\n <td>103.0</td>\n <td>168.0</td>\n </tr>\n <tr>\n <th>139</th>\n <td>Ford</td>\n <td>Crown Victoria 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>24345.0</td>\n <td>22856.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>224.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4057.0</td>\n <td>115.0</td>\n <td>212.0</td>\n </tr>\n <tr>\n <th>140</th>\n <td>Ford</td>\n <td>Mustang 2dr (convertible)</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>18345.0</td>\n <td>16943.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>193.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3290.0</td>\n <td>101.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>141</th>\n <td>Ford</td>\n <td>F-150 Supercab Lariat</td>\n <td>Truck</td>\n <td>USA</td>\n <td>All</td>\n <td>33540.0</td>\n <td>29405.0</td>\n <td>5.4</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>14.0</td>\n <td>18.0</td>\n <td>5464.0</td>\n <td>133.0</td>\n <td>218.0</td>\n </tr>\n <tr>\n <th>142</th>\n <td>GMC</td>\n <td>Envoy XUV SLE</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>31890.0</td>\n <td>28922.0</td>\n <td>4.2</td>\n <td>6.0</td>\n <td>275.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>4945.0</td>\n <td>129.0</td>\n <td>208.0</td>\n </tr>\n <tr>\n <th>143</th>\n <td>GMC</td>\n <td>Canyon Z85 SL Regular Cab</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>16530.0</td>\n <td>14877.0</td>\n <td>2.8</td>\n <td>4.0</td>\n <td>175.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3351.0</td>\n 
<td>111.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>144</th>\n <td>Honda</td>\n <td>Civic Hybrid 4dr manual (gas/electric)</td>\n <td>Hybrid</td>\n <td>Asia</td>\n <td>Front</td>\n <td>20140.0</td>\n <td>18451.0</td>\n <td>1.4</td>\n <td>4.0</td>\n <td>93.0</td>\n <td>46.0</td>\n <td>51.0</td>\n <td>2732.0</td>\n <td>103.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>145</th>\n <td>Honda</td>\n <td>Element LX</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>18690.0</td>\n <td>17334.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>21.0</td>\n <td>24.0</td>\n <td>3468.0</td>\n <td>101.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>146</th>\n <td>Honda</td>\n <td>Accord LX 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19860.0</td>\n <td>17924.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>26.0</td>\n <td>34.0</td>\n <td>2994.0</td>\n <td>105.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>147</th>\n <td>Honda</td>\n <td>Accord LX V6 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>23760.0</td>\n <td>21428.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>21.0</td>\n <td>30.0</td>\n <td>3349.0</td>\n <td>108.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>148</th>\n <td>Honda</td>\n <td>S2000 convertible 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>33260.0</td>\n <td>29965.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>240.0</td>\n <td>20.0</td>\n <td>25.0</td>\n <td>2835.0</td>\n <td>95.0</td>\n <td>162.0</td>\n </tr>\n <tr>\n <th>149</th>\n <td>Hyundai</td>\n <td>Accent GL 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>11839.0</td>\n <td>11116.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>103.0</td>\n <td>29.0</td>\n <td>33.0</td>\n <td>2290.0</td>\n <td>96.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>150</th>\n <td>Hyundai</td>\n <td>Elantra GT 4dr hatch</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>15389.0</td>\n <td>14207.0</td>\n <td>2.0</td>\n 
<td>4.0</td>\n <td>138.0</td>\n <td>26.0</td>\n <td>34.0</td>\n <td>2698.0</td>\n <td>103.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>151</th>\n <td>Hyundai</td>\n <td>XG350 L 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>26189.0</td>\n <td>23486.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>194.0</td>\n <td>17.0</td>\n <td>26.0</td>\n <td>3651.0</td>\n <td>108.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>152</th>\n <td>Infiniti</td>\n <td>G35 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>All</td>\n <td>32445.0</td>\n <td>29783.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>260.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3677.0</td>\n <td>112.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>153</th>\n <td>Infiniti</td>\n <td>FX35</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>34895.0</td>\n <td>31756.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>280.0</td>\n <td>16.0</td>\n <td>22.0</td>\n <td>4056.0</td>\n <td>112.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>154</th>\n <td>Jaguar</td>\n <td>X-Type 2.5 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>29995.0</td>\n <td>27355.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>192.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3428.0</td>\n <td>107.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>155</th>\n <td>Jaguar</td>\n <td>S-Type R 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>63120.0</td>\n <td>57499.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>390.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>4046.0</td>\n <td>115.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>156</th>\n <td>Jaguar</td>\n <td>XK8 coupe 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>69995.0</td>\n <td>63756.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>294.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3779.0</td>\n <td>102.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>157</th>\n <td>Jeep</td>\n <td>Grand Cherokee Laredo</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n 
<td>27905.0</td>\n <td>25686.0</td>\n <td>4.0</td>\n <td>6.0</td>\n <td>195.0</td>\n <td>16.0</td>\n <td>21.0</td>\n <td>3790.0</td>\n <td>106.0</td>\n <td>181.0</td>\n </tr>\n <tr>\n <th>158</th>\n <td>Kia</td>\n <td>Optima LX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>16040.0</td>\n <td>14910.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>138.0</td>\n <td>23.0</td>\n <td>30.0</td>\n <td>3281.0</td>\n <td>106.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>159</th>\n <td>Kia</td>\n <td>Spectra GS 4dr hatch</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>13580.0</td>\n <td>12830.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>124.0</td>\n <td>24.0</td>\n <td>32.0</td>\n <td>2686.0</td>\n <td>101.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>160</th>\n <td>Kia</td>\n <td>Sedona LX</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>20615.0</td>\n <td>19400.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>195.0</td>\n <td>16.0</td>\n <td>22.0</td>\n <td>4802.0</td>\n <td>115.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>161</th>\n <td>Land Rover</td>\n <td>Freelander SE</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>25995.0</td>\n <td>23969.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>174.0</td>\n <td>18.0</td>\n <td>21.0</td>\n <td>3577.0</td>\n <td>101.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>162</th>\n <td>Lexus</td>\n <td>ES 330 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>32350.0</td>\n <td>28755.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3460.0</td>\n <td>107.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>163</th>\n <td>Lexus</td>\n <td>GS 430 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>48450.0</td>\n <td>42232.0</td>\n <td>4.3</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>18.0</td>\n <td>23.0</td>\n <td>3715.0</td>\n <td>110.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>164</th>\n <td>Lincoln</td>\n <td>Navigator Luxury</td>\n <td>SUV</td>\n 
<td>USA</td>\n <td>All</td>\n <td>52775.0</td>\n <td>46360.0</td>\n <td>5.4</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>13.0</td>\n <td>18.0</td>\n <td>5969.0</td>\n <td>119.0</td>\n <td>206.0</td>\n </tr>\n <tr>\n <th>165</th>\n <td>Lincoln</td>\n <td>LS V8 Sport 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>40095.0</td>\n <td>36809.0</td>\n <td>3.9</td>\n <td>8.0</td>\n <td>280.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>3768.0</td>\n <td>115.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>166</th>\n <td>Lincoln</td>\n <td>Town Car Ultimate L 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>50470.0</td>\n <td>46208.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>239.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4474.0</td>\n <td>124.0</td>\n <td>221.0</td>\n </tr>\n <tr>\n <th>167</th>\n <td>Mazda</td>\n <td>Mazda3 i 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>15500.0</td>\n <td>14525.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>148.0</td>\n <td>26.0</td>\n <td>34.0</td>\n <td>2696.0</td>\n <td>104.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>168</th>\n <td>Mazda</td>\n <td>MX-5 Miata convertible 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>22388.0</td>\n <td>20701.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>142.0</td>\n <td>23.0</td>\n <td>28.0</td>\n <td>2387.0</td>\n <td>89.0</td>\n <td>156.0</td>\n </tr>\n <tr>\n <th>169</th>\n <td>Mazda</td>\n <td>B2300 SX Regular Cab</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>14840.0</td>\n <td>14070.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>143.0</td>\n <td>24.0</td>\n <td>29.0</td>\n <td>2960.0</td>\n <td>112.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>170</th>\n <td>Mercedes-Benz</td>\n <td>C230 Sport 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>26060.0</td>\n <td>24249.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>189.0</td>\n <td>22.0</td>\n <td>30.0</td>\n <td>3250.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n 
<th>171</th>\n <td>Mercedes-Benz</td>\n <td>C320 Sport 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>35920.0</td>\n <td>33456.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3430.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>172</th>\n <td>Mercedes-Benz</td>\n <td>CL500 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>94820.0</td>\n <td>88324.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>16.0</td>\n <td>24.0</td>\n <td>4085.0</td>\n <td>114.0</td>\n <td>196.0</td>\n </tr>\n <tr>\n <th>173</th>\n <td>Mercedes-Benz</td>\n <td>E320 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>48170.0</td>\n <td>44849.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>221.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3635.0</td>\n <td>112.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>174</th>\n <td>Mercedes-Benz</td>\n <td>SL500 convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>90520.0</td>\n <td>84325.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>16.0</td>\n <td>23.0</td>\n <td>4065.0</td>\n <td>101.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>175</th>\n <td>Mercedes-Benz</td>\n <td>SLK32 AMG 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>56170.0</td>\n <td>52289.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>349.0</td>\n <td>17.0</td>\n <td>22.0</td>\n <td>3220.0</td>\n <td>95.0</td>\n <td>158.0</td>\n </tr>\n <tr>\n <th>176</th>\n <td>Mercury</td>\n <td>Mountaineer</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>29995.0</td>\n <td>27317.0</td>\n <td>4.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>16.0</td>\n <td>21.0</td>\n <td>4374.0</td>\n <td>114.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>177</th>\n <td>Mercury</td>\n <td>Sable LS Premium 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>23895.0</td>\n <td>21918.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>201.0</td>\n 
<td>19.0</td>\n <td>26.0</td>\n <td>3315.0</td>\n <td>109.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>178</th>\n <td>Mercury</td>\n <td>Sable GS</td>\n <td>Wagon</td>\n <td>USA</td>\n <td>Front</td>\n <td>22595.0</td>\n <td>20748.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>155.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3488.0</td>\n <td>109.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>179</th>\n <td>Mitsubishi</td>\n <td>Lancer ES 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>14622.0</td>\n <td>13751.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>120.0</td>\n <td>25.0</td>\n <td>31.0</td>\n <td>2656.0</td>\n <td>102.0</td>\n <td>181.0</td>\n </tr>\n <tr>\n <th>180</th>\n <td>Mitsubishi</td>\n <td>Diamante LS 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>29282.0</td>\n <td>27250.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>205.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3549.0</td>\n <td>107.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>181</th>\n <td>Mitsubishi</td>\n <td>Lancer Evolution 4dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Front</td>\n <td>29562.0</td>\n <td>27466.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>271.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3263.0</td>\n <td>103.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>182</th>\n <td>Nissan</td>\n <td>Xterra XE V6</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>20939.0</td>\n <td>19512.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>180.0</td>\n <td>17.0</td>\n <td>20.0</td>\n <td>3760.0</td>\n <td>104.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>183</th>\n <td>Nissan</td>\n <td>Sentra SE-R 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>17640.0</td>\n <td>16444.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>23.0</td>\n <td>28.0</td>\n <td>2761.0</td>\n <td>100.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>184</th>\n <td>Nissan</td>\n <td>Quest S</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>24780.0</td>\n 
<td>22958.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>4012.0</td>\n <td>124.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>185</th>\n <td>Nissan</td>\n <td>Frontier King Cab XE V6</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>All</td>\n <td>19479.0</td>\n <td>18253.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>180.0</td>\n <td>17.0</td>\n <td>20.0</td>\n <td>3932.0</td>\n <td>116.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>186</th>\n <td>Oldsmobile</td>\n <td>Alero GLS 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>23675.0</td>\n <td>21485.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>170.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3085.0</td>\n <td>107.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>187</th>\n <td>Pontiac</td>\n <td>Grand Am GT 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>22450.0</td>\n <td>20595.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>175.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3118.0</td>\n <td>107.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>188</th>\n <td>Pontiac</td>\n <td>Bonneville GXP 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>35995.0</td>\n <td>32997.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>275.0</td>\n <td>17.0</td>\n <td>20.0</td>\n <td>3790.0</td>\n <td>112.0</td>\n <td>203.0</td>\n </tr>\n <tr>\n <th>189</th>\n <td>Pontiac</td>\n <td>Vibe</td>\n <td>Wagon</td>\n <td>USA</td>\n <td>Rear</td>\n <td>17045.0</td>\n <td>15973.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>29.0</td>\n <td>36.0</td>\n <td>2701.0</td>\n <td>102.0</td>\n <td>172.0</td>\n </tr>\n <tr>\n <th>190</th>\n <td>Porsche</td>\n <td>911 Targa coupe 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>76765.0</td>\n <td>67128.0</td>\n <td>3.6</td>\n <td>6.0</td>\n <td>315.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3119.0</td>\n <td>93.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>191</th>\n <td>Saab</td>\n <td>9-3 Arc Sport 4dr</td>\n 
<td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>30860.0</td>\n <td>29269.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>210.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3175.0</td>\n <td>105.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>192</th>\n <td>Saab</td>\n <td>9-3 Arc convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>40670.0</td>\n <td>38520.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3480.0</td>\n <td>105.0</td>\n <td>182.0</td>\n </tr>\n <tr>\n <th>193</th>\n <td>Saturn</td>\n <td>Ion1 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>10995.0</td>\n <td>10319.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>35.0</td>\n <td>2692.0</td>\n <td>103.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>194</th>\n <td>Saturn</td>\n <td>lon3 quad coupe 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>16350.0</td>\n <td>15299.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>35.0</td>\n <td>2751.0</td>\n <td>103.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>195</th>\n <td>Scion</td>\n <td>xB</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>Front</td>\n <td>14165.0</td>\n <td>13480.0</td>\n <td>1.5</td>\n <td>4.0</td>\n <td>108.0</td>\n <td>31.0</td>\n <td>35.0</td>\n <td>2425.0</td>\n <td>98.0</td>\n <td>155.0</td>\n </tr>\n <tr>\n <th>196</th>\n <td>Subaru</td>\n <td>Outback Limited Sedan 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>All</td>\n <td>27145.0</td>\n <td>24687.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3495.0</td>\n <td>104.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>197</th>\n <td>Subaru</td>\n <td>Impreza WRX STi 4dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>All</td>\n <td>31545.0</td>\n <td>29130.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>300.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>3263.0</td>\n <td>100.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n 
<th>198</th>\n <td>Suzuki</td>\n <td>XL-7 EX</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>23699.0</td>\n <td>22307.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>185.0</td>\n <td>18.0</td>\n <td>22.0</td>\n <td>3682.0</td>\n <td>110.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>199</th>\n <td>Suzuki</td>\n <td>Forenza S 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>12269.0</td>\n <td>12116.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>119.0</td>\n <td>24.0</td>\n <td>31.0</td>\n <td>2701.0</td>\n <td>102.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>200</th>\n <td>Toyota</td>\n <td>Prius 4dr (gas/electric)</td>\n <td>Hybrid</td>\n <td>Asia</td>\n <td>Front</td>\n <td>20510.0</td>\n <td>18926.0</td>\n <td>1.5</td>\n <td>4.0</td>\n <td>110.0</td>\n <td>59.0</td>\n <td>51.0</td>\n <td>2890.0</td>\n <td>106.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>201</th>\n <td>Toyota</td>\n <td>Land Cruiser</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>54765.0</td>\n <td>47986.0</td>\n <td>4.7</td>\n <td>8.0</td>\n <td>325.0</td>\n <td>13.0</td>\n <td>17.0</td>\n <td>5390.0</td>\n <td>112.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>202</th>\n <td>Toyota</td>\n <td>Corolla LE 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>15295.0</td>\n <td>13889.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>32.0</td>\n <td>40.0</td>\n <td>2524.0</td>\n <td>102.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>203</th>\n <td>Toyota</td>\n <td>Camry LE 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19560.0</td>\n <td>17558.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>157.0</td>\n <td>24.0</td>\n <td>33.0</td>\n <td>3086.0</td>\n <td>107.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>204</th>\n <td>Toyota</td>\n <td>Avalon XL 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>26560.0</td>\n <td>23693.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3417.0</td>\n 
<td>107.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>205</th>\n <td>Toyota</td>\n <td>Sienna CE</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>23495.0</td>\n <td>21198.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>230.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>4120.0</td>\n <td>119.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>206</th>\n <td>Toyota</td>\n <td>Tacoma</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>12800.0</td>\n <td>11879.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>142.0</td>\n <td>22.0</td>\n <td>27.0</td>\n <td>2750.0</td>\n <td>103.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>207</th>\n <td>Volkswagen</td>\n <td>Touareg V6</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>35515.0</td>\n <td>32243.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>15.0</td>\n <td>20.0</td>\n <td>5086.0</td>\n <td>112.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>208</th>\n <td>Volkswagen</td>\n <td>New Beetle GLS 1.8T 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>21055.0</td>\n <td>19638.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>24.0</td>\n <td>31.0</td>\n <td>2820.0</td>\n <td>99.0</td>\n <td>161.0</td>\n </tr>\n <tr>\n <th>209</th>\n <td>Volkswagen</td>\n <td>Passat GLX V6 4MOTION 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>33180.0</td>\n <td>30583.0</td>\n <td>2.8</td>\n <td>6.0</td>\n <td>190.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3721.0</td>\n <td>106.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>210</th>\n <td>Volkswagen</td>\n <td>Jetta GL</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>Front</td>\n <td>19005.0</td>\n <td>17427.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>115.0</td>\n <td>24.0</td>\n <td>30.0</td>\n <td>3034.0</td>\n <td>99.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>211</th>\n <td>Volvo</td>\n <td>S40 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>25135.0</td>\n <td>23701.0</td>\n <td>1.9</td>\n <td>4.0</td>\n 
<td>170.0</td>\n <td>22.0</td>\n <td>29.0</td>\n <td>2767.0</td>\n <td>101.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>212</th>\n <td>Volvo</td>\n <td>S80 2.9 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>37730.0</td>\n <td>35542.0</td>\n <td>2.9</td>\n <td>6.0</td>\n <td>208.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3576.0</td>\n <td>110.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>213</th>\n <td>Volvo</td>\n <td>S80 T6 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>45210.0</td>\n <td>42573.0</td>\n <td>2.9</td>\n <td>6.0</td>\n <td>268.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3653.0</td>\n <td>110.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>214</th>\n <td>Acura</td>\n <td>TSX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>26990.0</td>\n <td>24647.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>200.0</td>\n <td>22.0</td>\n <td>29.0</td>\n <td>3230.0</td>\n <td>105.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>215</th>\n <td>Acura</td>\n <td>NSX coupe 2dr manual S</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>89765.0</td>\n <td>79978.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>290.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>3153.0</td>\n <td>100.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>216</th>\n <td>Audi</td>\n <td>A4 3.0 Quattro 4dr manual</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>33430.0</td>\n <td>30366.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>17.0</td>\n <td>26.0</td>\n <td>3583.0</td>\n <td>104.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>217</th>\n <td>Audi</td>\n <td>A4 3.0 convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>42490.0</td>\n <td>38325.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3814.0</td>\n <td>105.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>218</th>\n <td>Audi</td>\n <td>A8 L Quattro 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n 
<td>69190.0</td>\n <td>64740.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>330.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>4399.0</td>\n <td>121.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>219</th>\n <td>Audi</td>\n <td>TT 1.8 Quattro 2dr (convertible)</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>All</td>\n <td>37390.0</td>\n <td>33891.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>225.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>2921.0</td>\n <td>96.0</td>\n <td>159.0</td>\n </tr>\n <tr>\n <th>220</th>\n <td>BMW</td>\n <td>X3 3.0i</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>37000.0</td>\n <td>33873.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>16.0</td>\n <td>23.0</td>\n <td>4023.0</td>\n <td>110.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>221</th>\n <td>BMW</td>\n <td>325Ci convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>37995.0</td>\n <td>34800.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>184.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3560.0</td>\n <td>107.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>222</th>\n <td>BMW</td>\n <td>330xi 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>37245.0</td>\n <td>34115.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3483.0</td>\n <td>107.0</td>\n <td>176.0</td>\n </tr>\n <tr>\n <th>223</th>\n <td>BMW</td>\n <td>545iA 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>54995.0</td>\n <td>50270.0</td>\n <td>4.4</td>\n <td>8.0</td>\n <td>325.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3814.0</td>\n <td>114.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>224</th>\n <td>BMW</td>\n <td>M3 convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>56595.0</td>\n <td>51815.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>333.0</td>\n <td>16.0</td>\n <td>23.0</td>\n <td>3781.0</td>\n <td>108.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>225</th>\n <td>Buick</td>\n <td>Rainier</td>\n 
<td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>37895.0</td>\n <td>34357.0</td>\n <td>4.2</td>\n <td>6.0</td>\n <td>275.0</td>\n <td>15.0</td>\n <td>21.0</td>\n <td>4600.0</td>\n <td>113.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>226</th>\n <td>Buick</td>\n <td>Regal LS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>24895.0</td>\n <td>22835.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3461.0</td>\n <td>109.0</td>\n <td>196.0</td>\n </tr>\n <tr>\n <th>227</th>\n <td>Buick</td>\n <td>Park Avenue Ultra 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>40720.0</td>\n <td>36927.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>18.0</td>\n <td>28.0</td>\n <td>3909.0</td>\n <td>114.0</td>\n <td>207.0</td>\n </tr>\n <tr>\n <th>228</th>\n <td>Cadillac</td>\n <td>Deville 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>45445.0</td>\n <td>41650.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>275.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3984.0</td>\n <td>115.0</td>\n <td>207.0</td>\n </tr>\n <tr>\n <th>229</th>\n <td>Cadillac</td>\n <td>Escalade EXT</td>\n <td>Truck</td>\n <td>USA</td>\n <td>All</td>\n <td>52975.0</td>\n <td>48541.0</td>\n <td>6.0</td>\n <td>8.0</td>\n <td>345.0</td>\n <td>13.0</td>\n <td>17.0</td>\n <td>5879.0</td>\n <td>130.0</td>\n <td>221.0</td>\n </tr>\n <tr>\n <th>230</th>\n <td>Chevrolet</td>\n <td>Tracker</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>20255.0</td>\n <td>19108.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>165.0</td>\n <td>19.0</td>\n <td>22.0</td>\n <td>2866.0</td>\n <td>98.0</td>\n <td>163.0</td>\n </tr>\n <tr>\n <th>231</th>\n <td>Chevrolet</td>\n <td>Cavalier 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>14810.0</td>\n <td>13884.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>37.0</td>\n <td>2676.0</td>\n <td>104.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>232</th>\n 
<td>Chevrolet</td>\n <td>Malibu LS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>20370.0</td>\n <td>18639.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>22.0</td>\n <td>30.0</td>\n <td>3297.0</td>\n <td>106.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>233</th>\n <td>Chevrolet</td>\n <td>Malibu LT 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>23495.0</td>\n <td>21551.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>23.0</td>\n <td>32.0</td>\n <td>3315.0</td>\n <td>106.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>234</th>\n <td>Chevrolet</td>\n <td>Corvette 2dr</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>44535.0</td>\n <td>39068.0</td>\n <td>5.7</td>\n <td>8.0</td>\n <td>350.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3246.0</td>\n <td>105.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>235</th>\n <td>Chevrolet</td>\n <td>Silverado 1500 Regular Cab</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>20310.0</td>\n <td>18480.0</td>\n <td>4.3</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>15.0</td>\n <td>21.0</td>\n <td>4142.0</td>\n <td>119.0</td>\n <td>206.0</td>\n </tr>\n <tr>\n <th>236</th>\n <td>Chrysler</td>\n <td>PT Cruiser 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>17985.0</td>\n <td>16919.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>22.0</td>\n <td>29.0</td>\n <td>3101.0</td>\n <td>103.0</td>\n <td>169.0</td>\n </tr>\n <tr>\n <th>237</th>\n <td>Chrysler</td>\n <td>300M 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>29865.0</td>\n <td>27797.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>250.0</td>\n <td>18.0</td>\n <td>27.0</td>\n <td>3581.0</td>\n <td>113.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>238</th>\n <td>Chrysler</td>\n <td>Sebring convertible 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>25215.0</td>\n <td>23451.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>22.0</td>\n <td>30.0</td>\n 
<td>3357.0</td>\n <td>106.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>239</th>\n <td>Chrysler</td>\n <td>Town and Country Limited</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>38380.0</td>\n <td>35063.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4331.0</td>\n <td>119.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>240</th>\n <td>Dodge</td>\n <td>Neon SE 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>13670.0</td>\n <td>12849.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>132.0</td>\n <td>29.0</td>\n <td>36.0</td>\n <td>2581.0</td>\n <td>105.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>241</th>\n <td>Dodge</td>\n <td>Stratus SE 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>20220.0</td>\n <td>18821.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3175.0</td>\n <td>108.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>242</th>\n <td>Dodge</td>\n <td>Viper SRT-10 convertible 2dr</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>81795.0</td>\n <td>74451.0</td>\n <td>8.3</td>\n <td>10.0</td>\n <td>500.0</td>\n <td>12.0</td>\n <td>20.0</td>\n <td>3410.0</td>\n <td>99.0</td>\n <td>176.0</td>\n </tr>\n <tr>\n <th>243</th>\n <td>Ford</td>\n <td>Excursion 6.8 XLT</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>41475.0</td>\n <td>36494.0</td>\n <td>6.8</td>\n <td>10.0</td>\n <td>310.0</td>\n <td>10.0</td>\n <td>13.0</td>\n <td>7190.0</td>\n <td>137.0</td>\n <td>227.0</td>\n </tr>\n <tr>\n <th>244</th>\n <td>Ford</td>\n <td>Focus ZX3 2dr hatch</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>13270.0</td>\n <td>12482.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>26.0</td>\n <td>33.0</td>\n <td>2612.0</td>\n <td>103.0</td>\n <td>168.0</td>\n </tr>\n <tr>\n <th>245</th>\n <td>Ford</td>\n <td>Focus SVT 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>19135.0</td>\n <td>17878.0</td>\n <td>2.0</td>\n 
<td>4.0</td>\n <td>170.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>2750.0</td>\n <td>103.0</td>\n <td>168.0</td>\n </tr>\n <tr>\n <th>246</th>\n <td>Ford</td>\n <td>Crown Victoria LX 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>27370.0</td>\n <td>25105.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>224.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4057.0</td>\n <td>115.0</td>\n <td>212.0</td>\n </tr>\n <tr>\n <th>247</th>\n <td>Ford</td>\n <td>Mustang GT Premium convertible 2dr</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>29380.0</td>\n <td>26875.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>260.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>3347.0</td>\n <td>101.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>248</th>\n <td>Ford</td>\n <td>Ranger 2.3 XL Regular Cab</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>14385.0</td>\n <td>13717.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>143.0</td>\n <td>24.0</td>\n <td>29.0</td>\n <td>3028.0</td>\n <td>111.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>249</th>\n <td>GMC</td>\n <td>Yukon 1500 SLE</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>35725.0</td>\n <td>31361.0</td>\n <td>4.8</td>\n <td>8.0</td>\n <td>285.0</td>\n <td>16.0</td>\n <td>19.0</td>\n <td>5042.0</td>\n <td>116.0</td>\n <td>199.0</td>\n </tr>\n <tr>\n <th>250</th>\n <td>GMC</td>\n <td>Sierra Extended Cab 1500</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>25717.0</td>\n <td>22604.0</td>\n <td>4.8</td>\n <td>8.0</td>\n <td>285.0</td>\n <td>17.0</td>\n <td>20.0</td>\n <td>4548.0</td>\n <td>144.0</td>\n <td>230.0</td>\n </tr>\n <tr>\n <th>251</th>\n <td>Honda</td>\n <td>Insight 2dr (gas/electric)</td>\n <td>Hybrid</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19110.0</td>\n <td>17911.0</td>\n <td>2.0</td>\n <td>3.0</td>\n <td>73.0</td>\n <td>60.0</td>\n <td>66.0</td>\n <td>1850.0</td>\n <td>95.0</td>\n <td>155.0</td>\n </tr>\n <tr>\n <th>252</th>\n <td>Honda</td>\n <td>Civic DX 2dr</td>\n <td>Sedan</td>\n 
<td>Asia</td>\n <td>Front</td>\n <td>13270.0</td>\n <td>12175.0</td>\n <td>1.7</td>\n <td>4.0</td>\n <td>115.0</td>\n <td>32.0</td>\n <td>38.0</td>\n <td>2432.0</td>\n <td>103.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>253</th>\n <td>Honda</td>\n <td>Accord EX 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>22260.0</td>\n <td>20080.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>26.0</td>\n <td>34.0</td>\n <td>3047.0</td>\n <td>105.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>254</th>\n <td>Honda</td>\n <td>Accord EX V6 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>26960.0</td>\n <td>24304.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>21.0</td>\n <td>30.0</td>\n <td>3294.0</td>\n <td>105.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>255</th>\n <td>Hummer</td>\n <td>H2</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>49995.0</td>\n <td>45815.0</td>\n <td>6.0</td>\n <td>8.0</td>\n <td>316.0</td>\n <td>10.0</td>\n <td>12.0</td>\n <td>6400.0</td>\n <td>123.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>256</th>\n <td>Hyundai</td>\n <td>Accent GT 2dr hatch</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>11939.0</td>\n <td>11209.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>103.0</td>\n <td>29.0</td>\n <td>33.0</td>\n <td>2339.0</td>\n <td>96.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>257</th>\n <td>Hyundai</td>\n <td>Sonata GLS 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19339.0</td>\n <td>17574.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>170.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3217.0</td>\n <td>106.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>258</th>\n <td>Hyundai</td>\n <td>Tiburon GT V6 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Front</td>\n <td>18739.0</td>\n <td>17101.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>172.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3023.0</td>\n <td>100.0</td>\n <td>173.0</td>\n </tr>\n <tr>\n <th>259</th>\n 
<td>Infiniti</td>\n <td>I35 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>31145.0</td>\n <td>28320.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>255.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3306.0</td>\n <td>108.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>260</th>\n <td>Infiniti</td>\n <td>FX45</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>All</td>\n <td>36395.0</td>\n <td>33121.0</td>\n <td>4.5</td>\n <td>8.0</td>\n <td>315.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>4309.0</td>\n <td>112.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>261</th>\n <td>Jaguar</td>\n <td>X-Type 3.0 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>33995.0</td>\n <td>30995.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>227.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3516.0</td>\n <td>107.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>262</th>\n <td>Jaguar</td>\n <td>Vanden Plas 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>68995.0</td>\n <td>62846.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>294.0</td>\n <td>18.0</td>\n <td>28.0</td>\n <td>3803.0</td>\n <td>119.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>263</th>\n <td>Jaguar</td>\n <td>XK8 convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>74995.0</td>\n <td>68306.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>294.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3980.0</td>\n <td>102.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>264</th>\n <td>Jeep</td>\n <td>Liberty Sport</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>20130.0</td>\n <td>18973.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>20.0</td>\n <td>24.0</td>\n <td>3826.0</td>\n <td>104.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>265</th>\n <td>Kia</td>\n <td>Rio 4dr manual</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>10280.0</td>\n <td>9875.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>104.0</td>\n <td>26.0</td>\n <td>33.0</td>\n <td>2403.0</td>\n <td>95.0</td>\n 
<td>167.0</td>\n </tr>\n <tr>\n <th>266</th>\n <td>Kia</td>\n <td>Spectra GSX 4dr hatch</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>14630.0</td>\n <td>13790.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>124.0</td>\n <td>24.0</td>\n <td>32.0</td>\n <td>2697.0</td>\n <td>101.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>267</th>\n <td>Kia</td>\n <td>Rio Cinco</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>Front</td>\n <td>11905.0</td>\n <td>11410.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>104.0</td>\n <td>26.0</td>\n <td>33.0</td>\n <td>2447.0</td>\n <td>95.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>268</th>\n <td>Lexus</td>\n <td>GX 470</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>45700.0</td>\n <td>39838.0</td>\n <td>4.7</td>\n <td>8.0</td>\n <td>235.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>4740.0</td>\n <td>110.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>269</th>\n <td>Lexus</td>\n <td>IS 300 4dr manual</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>31045.0</td>\n <td>27404.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3255.0</td>\n <td>105.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>270</th>\n <td>Lexus</td>\n <td>LS 430 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>55750.0</td>\n <td>48583.0</td>\n <td>4.3</td>\n <td>8.0</td>\n <td>290.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3990.0</td>\n <td>115.0</td>\n <td>197.0</td>\n </tr>\n <tr>\n <th>271</th>\n <td>Lincoln</td>\n <td>Aviator Ultimate</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>42915.0</td>\n <td>39443.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>13.0</td>\n <td>18.0</td>\n <td>4834.0</td>\n <td>114.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>272</th>\n <td>Lincoln</td>\n <td>LS V8 Ultimate 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>43495.0</td>\n <td>39869.0</td>\n <td>3.9</td>\n <td>8.0</td>\n <td>280.0</td>\n <td>17.0</td>\n <td>24.0</td>\n 
<td>3768.0</td>\n <td>115.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>273</th>\n <td>MINI</td>\n <td>Cooper</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>16999.0</td>\n <td>15437.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>115.0</td>\n <td>28.0</td>\n <td>37.0</td>\n <td>2524.0</td>\n <td>97.0</td>\n <td>143.0</td>\n </tr>\n <tr>\n <th>274</th>\n <td>Mazda</td>\n <td>Mazda3 s 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>17200.0</td>\n <td>15922.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>25.0</td>\n <td>31.0</td>\n <td>2762.0</td>\n <td>104.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>275</th>\n <td>Mazda</td>\n <td>MX-5 Miata LS convertible 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>25193.0</td>\n <td>23285.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>142.0</td>\n <td>23.0</td>\n <td>28.0</td>\n <td>2387.0</td>\n <td>89.0</td>\n <td>156.0</td>\n </tr>\n <tr>\n <th>276</th>\n <td>Mazda</td>\n <td>B4000 SE Cab Plus</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>All</td>\n <td>22350.0</td>\n <td>20482.0</td>\n <td>4.0</td>\n <td>6.0</td>\n <td>207.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>3571.0</td>\n <td>126.0</td>\n <td>203.0</td>\n </tr>\n <tr>\n <th>277</th>\n <td>Mercedes-Benz</td>\n <td>C320 Sport 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>28370.0</td>\n <td>26435.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3430.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>278</th>\n <td>Mercedes-Benz</td>\n <td>C320 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>37630.0</td>\n <td>35046.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3450.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>279</th>\n <td>Mercedes-Benz</td>\n <td>CL600 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>128420.0</td>\n <td>119600.0</td>\n 
<td>5.5</td>\n <td>12.0</td>\n <td>493.0</td>\n <td>13.0</td>\n <td>19.0</td>\n <td>4473.0</td>\n <td>114.0</td>\n <td>196.0</td>\n </tr>\n <tr>\n <th>280</th>\n <td>Mercedes-Benz</td>\n <td>E500 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>57270.0</td>\n <td>53382.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>16.0</td>\n <td>20.0</td>\n <td>3815.0</td>\n <td>112.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>281</th>\n <td>Mercedes-Benz</td>\n <td>SL55 AMG 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>121770.0</td>\n <td>113388.0</td>\n <td>5.5</td>\n <td>8.0</td>\n <td>493.0</td>\n <td>14.0</td>\n <td>21.0</td>\n <td>4235.0</td>\n <td>101.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>282</th>\n <td>Mercedes-Benz</td>\n <td>C240</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>33780.0</td>\n <td>31466.0</td>\n <td>2.6</td>\n <td>6.0</td>\n <td>168.0</td>\n <td>19.0</td>\n <td>25.0</td>\n <td>3470.0</td>\n <td>107.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>283</th>\n <td>Mercury</td>\n <td>Sable GS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>21595.0</td>\n <td>19848.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>155.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3308.0</td>\n <td>109.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>284</th>\n <td>Mercury</td>\n <td>Grand Marquis LS Ultimate 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>30895.0</td>\n <td>28318.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>224.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4052.0</td>\n <td>115.0</td>\n <td>212.0</td>\n </tr>\n <tr>\n <th>285</th>\n <td>Mitsubishi</td>\n <td>Endeavor XLS</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>30492.0</td>\n <td>28330.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>17.0</td>\n <td>21.0</td>\n <td>4134.0</td>\n <td>109.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>286</th>\n <td>Mitsubishi</td>\n <td>Lancer LS 4dr</td>\n 
<td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>16722.0</td>\n <td>15718.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>120.0</td>\n <td>25.0</td>\n <td>31.0</td>\n <td>2795.0</td>\n <td>102.0</td>\n <td>181.0</td>\n </tr>\n <tr>\n <th>287</th>\n <td>Mitsubishi</td>\n <td>Galant GTS 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>25700.0</td>\n <td>23883.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>230.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3649.0</td>\n <td>108.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>288</th>\n <td>Mitsubishi</td>\n <td>Lancer Sportback LS</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>Front</td>\n <td>17495.0</td>\n <td>16295.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>25.0</td>\n <td>31.0</td>\n <td>3020.0</td>\n <td>102.0</td>\n <td>181.0</td>\n </tr>\n <tr>\n <th>289</th>\n <td>Nissan</td>\n <td>Sentra 1.8 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>12740.0</td>\n <td>12205.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>126.0</td>\n <td>28.0</td>\n <td>35.0</td>\n <td>2513.0</td>\n <td>100.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>290</th>\n <td>Nissan</td>\n <td>Altima SE 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>23290.0</td>\n <td>21580.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>245.0</td>\n <td>21.0</td>\n <td>26.0</td>\n <td>3197.0</td>\n <td>110.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>291</th>\n <td>Nissan</td>\n <td>Quest SE</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>32780.0</td>\n <td>30019.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4175.0</td>\n <td>124.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>292</th>\n <td>Nissan</td>\n <td>Titan King Cab XE</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>All</td>\n <td>26650.0</td>\n <td>24926.0</td>\n <td>5.6</td>\n <td>8.0</td>\n <td>305.0</td>\n <td>14.0</td>\n <td>18.0</td>\n <td>5287.0</td>\n <td>140.0</td>\n <td>224.0</td>\n </tr>\n <tr>\n 
<th>293</th>\n <td>Oldsmobile</td>\n <td>Silhouette GL</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>28790.0</td>\n <td>26120.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>185.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3948.0</td>\n <td>120.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>294</th>\n <td>Pontiac</td>\n <td>Grand Prix GT1 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>22395.0</td>\n <td>20545.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>20.0</td>\n <td>30.0</td>\n <td>3477.0</td>\n <td>111.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>295</th>\n <td>Pontiac</td>\n <td>Montana</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>23845.0</td>\n <td>21644.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>185.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3803.0</td>\n <td>112.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>296</th>\n <td>Porsche</td>\n <td>Cayenne S</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>56665.0</td>\n <td>49865.0</td>\n <td>4.5</td>\n <td>8.0</td>\n <td>340.0</td>\n <td>14.0</td>\n <td>18.0</td>\n <td>4950.0</td>\n <td>112.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>297</th>\n <td>Porsche</td>\n <td>911 GT2 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>192465.0</td>\n <td>173560.0</td>\n <td>3.6</td>\n <td>6.0</td>\n <td>477.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>3131.0</td>\n <td>93.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>298</th>\n <td>Saab</td>\n <td>9-3 Aero 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>33360.0</td>\n <td>31562.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>210.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3175.0</td>\n <td>105.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>299</th>\n <td>Saab</td>\n <td>9-3 Aero convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>43175.0</td>\n <td>40883.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>30.0</td>\n 
<td>3700.0</td>\n <td>105.0</td>\n <td>182.0</td>\n </tr>\n <tr>\n <th>300</th>\n <td>Saturn</td>\n <td>lon2 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>14300.0</td>\n <td>13393.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>35.0</td>\n <td>2692.0</td>\n <td>103.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>301</th>\n <td>Saturn</td>\n <td>L300-2 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>21410.0</td>\n <td>19801.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>182.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3197.0</td>\n <td>107.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>302</th>\n <td>Subaru</td>\n <td>Impreza 2.5 RS 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>All</td>\n <td>19945.0</td>\n <td>18399.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>22.0</td>\n <td>28.0</td>\n <td>2965.0</td>\n <td>99.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>303</th>\n <td>Subaru</td>\n <td>Outback H6 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>All</td>\n <td>29345.0</td>\n <td>26660.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>212.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3610.0</td>\n <td>104.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>304</th>\n <td>Subaru</td>\n <td>Baja</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>All</td>\n <td>24520.0</td>\n <td>22304.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3485.0</td>\n <td>104.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>305</th>\n <td>Suzuki</td>\n <td>Vitara LX</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>17163.0</td>\n <td>16949.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>165.0</td>\n <td>19.0</td>\n <td>22.0</td>\n <td>3020.0</td>\n <td>98.0</td>\n <td>163.0</td>\n </tr>\n <tr>\n <th>306</th>\n <td>Suzuki</td>\n <td>Forenza EX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>15568.0</td>\n <td>15378.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>119.0</td>\n <td>22.0</td>\n 
<td>30.0</td>\n <td>2756.0</td>\n <td>102.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>307</th>\n <td>Toyota</td>\n <td>Sequoia SR5</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>35695.0</td>\n <td>31827.0</td>\n <td>4.7</td>\n <td>8.0</td>\n <td>240.0</td>\n <td>14.0</td>\n <td>17.0</td>\n <td>5270.0</td>\n <td>118.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>308</th>\n <td>Toyota</td>\n <td>RAV4</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>20290.0</td>\n <td>18553.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>161.0</td>\n <td>22.0</td>\n <td>27.0</td>\n <td>3119.0</td>\n <td>98.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>309</th>\n <td>Toyota</td>\n <td>Echo 2dr manual</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>10760.0</td>\n <td>10144.0</td>\n <td>1.5</td>\n <td>4.0</td>\n <td>108.0</td>\n <td>35.0</td>\n <td>43.0</td>\n <td>2035.0</td>\n <td>93.0</td>\n <td>163.0</td>\n </tr>\n <tr>\n <th>310</th>\n <td>Toyota</td>\n <td>Camry LE V6 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>22775.0</td>\n <td>20325.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3296.0</td>\n <td>107.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>311</th>\n <td>Toyota</td>\n <td>Camry XLE V6 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>25920.0</td>\n <td>23125.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3362.0</td>\n <td>107.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>312</th>\n <td>Toyota</td>\n <td>Sienna XLE Limited</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>28800.0</td>\n <td>25690.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>230.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>4165.0</td>\n <td>119.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>313</th>\n <td>Toyota</td>\n <td>Tundra Regular Cab V6</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>16495.0</td>\n <td>14978.0</td>\n <td>3.4</td>\n 
<td>6.0</td>\n <td>190.0</td>\n <td>16.0</td>\n <td>18.0</td>\n <td>3925.0</td>\n <td>128.0</td>\n <td>218.0</td>\n </tr>\n <tr>\n <th>314</th>\n <td>Volkswagen</td>\n <td>Golf GLS 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>18715.0</td>\n <td>17478.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>115.0</td>\n <td>24.0</td>\n <td>31.0</td>\n <td>2897.0</td>\n <td>99.0</td>\n <td>165.0</td>\n </tr>\n <tr>\n <th>315</th>\n <td>Volkswagen</td>\n <td>Jetta GLI VR6 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>23785.0</td>\n <td>21686.0</td>\n <td>2.8</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>21.0</td>\n <td>30.0</td>\n <td>3179.0</td>\n <td>99.0</td>\n <td>172.0</td>\n </tr>\n <tr>\n <th>316</th>\n <td>Volkswagen</td>\n <td>Passat W8 4MOTION 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>39235.0</td>\n <td>36052.0</td>\n <td>4.0</td>\n <td>8.0</td>\n <td>270.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3953.0</td>\n <td>106.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>317</th>\n <td>Volkswagen</td>\n <td>Passat GLS 1.8T</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>Front</td>\n <td>24955.0</td>\n <td>22801.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>170.0</td>\n <td>22.0</td>\n <td>31.0</td>\n <td>3338.0</td>\n <td>106.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>318</th>\n <td>Volvo</td>\n <td>S60 2.5 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>31745.0</td>\n <td>29916.0</td>\n <td>2.5</td>\n <td>5.0</td>\n <td>208.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3903.0</td>\n <td>107.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>319</th>\n <td>Volvo</td>\n <td>S80 2.5T 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>37885.0</td>\n <td>35688.0</td>\n <td>2.5</td>\n <td>5.0</td>\n <td>194.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3691.0</td>\n <td>110.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>320</th>\n <td>Volvo</td>\n <td>V40</td>\n <td>Wagon</td>\n <td>Europe</td>\n 
<td>Front</td>\n <td>26135.0</td>\n <td>24641.0</td>\n <td>1.9</td>\n <td>4.0</td>\n <td>170.0</td>\n <td>22.0</td>\n <td>29.0</td>\n <td>2822.0</td>\n <td>101.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>321</th>\n <td>Acura</td>\n <td>TL 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>33195.0</td>\n <td>30299.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>270.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3575.0</td>\n <td>108.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>322</th>\n <td>Audi</td>\n <td>A4 1.8T 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>25940.0</td>\n <td>23508.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>170.0</td>\n <td>22.0</td>\n <td>31.0</td>\n <td>3252.0</td>\n <td>104.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>323</th>\n <td>Audi</td>\n <td>A4 3.0 Quattro 4dr auto</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>34480.0</td>\n <td>31388.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3627.0</td>\n <td>104.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>324</th>\n <td>Audi</td>\n <td>A4 3.0 Quattro convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>44240.0</td>\n <td>40075.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>220.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4013.0</td>\n <td>105.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>325</th>\n <td>Audi</td>\n <td>S4 Quattro 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>48040.0</td>\n <td>43556.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>340.0</td>\n <td>14.0</td>\n <td>20.0</td>\n <td>3825.0</td>\n <td>104.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>326</th>\n <td>Audi</td>\n <td>TT 3.2 coupe 2dr (convertible)</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>All</td>\n <td>40590.0</td>\n <td>36739.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>250.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3351.0</td>\n <td>96.0</td>\n <td>159.0</td>\n </tr>\n <tr>\n <th>327</th>\n 
<td>BMW</td>\n <td>X5 4.4i</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>52195.0</td>\n <td>47720.0</td>\n <td>4.4</td>\n <td>8.0</td>\n <td>325.0</td>\n <td>16.0</td>\n <td>22.0</td>\n <td>4824.0</td>\n <td>111.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>328</th>\n <td>BMW</td>\n <td>325xi 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>30245.0</td>\n <td>27745.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>184.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3461.0</td>\n <td>107.0</td>\n <td>176.0</td>\n </tr>\n <tr>\n <th>329</th>\n <td>BMW</td>\n <td>525i 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>39995.0</td>\n <td>36620.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>184.0</td>\n <td>19.0</td>\n <td>28.0</td>\n <td>3428.0</td>\n <td>114.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>330</th>\n <td>BMW</td>\n <td>745i 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>69195.0</td>\n <td>63190.0</td>\n <td>4.4</td>\n <td>8.0</td>\n <td>325.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>4376.0</td>\n <td>118.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>331</th>\n <td>BMW</td>\n <td>Z4 convertible 2.5i 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>33895.0</td>\n <td>31065.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>184.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>2932.0</td>\n <td>98.0</td>\n <td>161.0</td>\n </tr>\n <tr>\n <th>332</th>\n <td>Buick</td>\n <td>Rendezvous CX</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>26545.0</td>\n <td>24085.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>185.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>4024.0</td>\n <td>112.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>333</th>\n <td>Buick</td>\n <td>Regal GS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>28345.0</td>\n <td>26047.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>18.0</td>\n <td>28.0</td>\n <td>3536.0</td>\n <td>109.0</td>\n <td>196.0</td>\n </tr>\n <tr>\n 
<th>334</th>\n <td>Cadillac</td>\n <td>Escalade</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>52795.0</td>\n <td>48377.0</td>\n <td>5.3</td>\n <td>8.0</td>\n <td>295.0</td>\n <td>14.0</td>\n <td>18.0</td>\n <td>5367.0</td>\n <td>116.0</td>\n <td>199.0</td>\n </tr>\n <tr>\n <th>335</th>\n <td>Cadillac</td>\n <td>Deville DTS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>50595.0</td>\n <td>46362.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>4044.0</td>\n <td>115.0</td>\n <td>207.0</td>\n </tr>\n <tr>\n <th>336</th>\n <td>Chevrolet</td>\n <td>Suburban 1500 LT</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>42735.0</td>\n <td>37422.0</td>\n <td>5.3</td>\n <td>8.0</td>\n <td>295.0</td>\n <td>14.0</td>\n <td>18.0</td>\n <td>4947.0</td>\n <td>130.0</td>\n <td>219.0</td>\n </tr>\n <tr>\n <th>337</th>\n <td>Chevrolet</td>\n <td>Aveo 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>11690.0</td>\n <td>10965.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>103.0</td>\n <td>28.0</td>\n <td>34.0</td>\n <td>2370.0</td>\n <td>98.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>338</th>\n <td>Chevrolet</td>\n <td>Cavalier LS 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>16385.0</td>\n <td>15357.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>37.0</td>\n <td>2617.0</td>\n <td>104.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>339</th>\n <td>Chevrolet</td>\n <td>Monte Carlo LS 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>21825.0</td>\n <td>20026.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>180.0</td>\n <td>21.0</td>\n <td>32.0</td>\n <td>3340.0</td>\n <td>111.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>340</th>\n <td>Chevrolet</td>\n <td>Monte Carlo SS 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>24225.0</td>\n <td>22222.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>18.0</td>\n <td>28.0</td>\n 
<td>3434.0</td>\n <td>111.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>341</th>\n <td>Chevrolet</td>\n <td>Corvette convertible 2dr</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>51535.0</td>\n <td>45193.0</td>\n <td>5.7</td>\n <td>8.0</td>\n <td>350.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3248.0</td>\n <td>105.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>342</th>\n <td>Chevrolet</td>\n <td>Silverado SS</td>\n <td>Truck</td>\n <td>USA</td>\n <td>All</td>\n <td>40340.0</td>\n <td>35399.0</td>\n <td>6.0</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>13.0</td>\n <td>17.0</td>\n <td>4804.0</td>\n <td>144.0</td>\n <td>238.0</td>\n </tr>\n <tr>\n <th>343</th>\n <td>Chrysler</td>\n <td>PT Cruiser Limited 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>22000.0</td>\n <td>20573.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>150.0</td>\n <td>22.0</td>\n <td>29.0</td>\n <td>3105.0</td>\n <td>103.0</td>\n <td>169.0</td>\n </tr>\n <tr>\n <th>344</th>\n <td>Chrysler</td>\n <td>Concorde LX 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>24130.0</td>\n <td>22452.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>200.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3479.0</td>\n <td>113.0</td>\n <td>208.0</td>\n </tr>\n <tr>\n <th>345</th>\n <td>Chrysler</td>\n <td>300M Special Edition 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>33295.0</td>\n <td>30884.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>255.0</td>\n <td>18.0</td>\n <td>27.0</td>\n <td>3650.0</td>\n <td>113.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>346</th>\n <td>Chrysler</td>\n <td>Crossfire 2dr</td>\n <td>Sports</td>\n <td>USA</td>\n <td>Rear</td>\n <td>34495.0</td>\n <td>32033.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>3060.0</td>\n <td>95.0</td>\n <td>160.0</td>\n </tr>\n <tr>\n <th>347</th>\n <td>Dodge</td>\n <td>Neon SXT 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>15040.0</td>\n <td>14086.0</td>\n 
<td>2.0</td>\n <td>4.0</td>\n <td>132.0</td>\n <td>29.0</td>\n <td>36.0</td>\n <td>2626.0</td>\n <td>105.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>348</th>\n <td>Dodge</td>\n <td>Intrepid ES 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>24885.0</td>\n <td>23058.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>232.0</td>\n <td>18.0</td>\n <td>27.0</td>\n <td>3487.0</td>\n <td>113.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>349</th>\n <td>Dodge</td>\n <td>Dakota Regular Cab</td>\n <td>Truck</td>\n <td>USA</td>\n <td>Rear</td>\n <td>17630.0</td>\n <td>16264.0</td>\n <td>3.7</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>16.0</td>\n <td>22.0</td>\n <td>3714.0</td>\n <td>112.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>350</th>\n <td>Ford</td>\n <td>Expedition 4.6 XLT</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>34560.0</td>\n <td>30468.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>232.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>5000.0</td>\n <td>119.0</td>\n <td>206.0</td>\n </tr>\n <tr>\n <th>351</th>\n <td>Ford</td>\n <td>Focus LX 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>13730.0</td>\n <td>12906.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>110.0</td>\n <td>27.0</td>\n <td>36.0</td>\n <td>2606.0</td>\n <td>103.0</td>\n <td>168.0</td>\n </tr>\n <tr>\n <th>352</th>\n <td>Ford</td>\n <td>Taurus LX 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>20320.0</td>\n <td>18881.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>155.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3306.0</td>\n <td>109.0</td>\n <td>198.0</td>\n </tr>\n <tr>\n <th>353</th>\n <td>Ford</td>\n <td>Crown Victoria LX Sport 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>30315.0</td>\n <td>27756.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>239.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4057.0</td>\n <td>115.0</td>\n <td>212.0</td>\n </tr>\n <tr>\n <th>354</th>\n <td>Ford</td>\n <td>Thunderbird Deluxe convert w/hardtop 2d</td>\n 
<td>Sports</td>\n <td>USA</td>\n <td>Front</td>\n <td>37530.0</td>\n <td>34483.0</td>\n <td>3.9</td>\n <td>8.0</td>\n <td>280.0</td>\n <td>17.0</td>\n <td>24.0</td>\n <td>3780.0</td>\n <td>107.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>355</th>\n <td>Ford</td>\n <td>Focus ZTW</td>\n <td>Wagon</td>\n <td>USA</td>\n <td>Front</td>\n <td>17475.0</td>\n <td>16375.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>26.0</td>\n <td>33.0</td>\n <td>2702.0</td>\n <td>103.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>356</th>\n <td>GMC</td>\n <td>Yukon XL 2500 SLT</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>46265.0</td>\n <td>40534.0</td>\n <td>6.0</td>\n <td>8.0</td>\n <td>325.0</td>\n <td>13.0</td>\n <td>17.0</td>\n <td>6133.0</td>\n <td>130.0</td>\n <td>219.0</td>\n </tr>\n <tr>\n <th>357</th>\n <td>GMC</td>\n <td>Sierra HD 2500</td>\n <td>Truck</td>\n <td>USA</td>\n <td>All</td>\n <td>29322.0</td>\n <td>25759.0</td>\n <td>6.0</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>13.0</td>\n <td>18.0</td>\n <td>5440.0</td>\n <td>133.0</td>\n <td>222.0</td>\n </tr>\n <tr>\n <th>358</th>\n <td>Honda</td>\n <td>Pilot LX</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>27560.0</td>\n <td>24843.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>17.0</td>\n <td>22.0</td>\n <td>4387.0</td>\n <td>106.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>359</th>\n <td>Honda</td>\n <td>Civic HX 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>14170.0</td>\n <td>12996.0</td>\n <td>1.7</td>\n <td>4.0</td>\n <td>117.0</td>\n <td>36.0</td>\n <td>44.0</td>\n <td>2500.0</td>\n <td>103.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>360</th>\n <td>Honda</td>\n <td>Civic EX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>17750.0</td>\n <td>16265.0</td>\n <td>1.7</td>\n <td>4.0</td>\n <td>127.0</td>\n <td>32.0</td>\n <td>37.0</td>\n <td>2601.0</td>\n <td>103.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>361</th>\n <td>Honda</td>\n <td>Odyssey 
LX</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>24950.0</td>\n <td>22498.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>240.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4310.0</td>\n <td>118.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>362</th>\n <td>Hyundai</td>\n <td>Santa Fe GLS</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>21589.0</td>\n <td>20201.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>173.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3549.0</td>\n <td>103.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>363</th>\n <td>Hyundai</td>\n <td>Elantra GLS 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>13839.0</td>\n <td>12781.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>138.0</td>\n <td>26.0</td>\n <td>34.0</td>\n <td>2635.0</td>\n <td>103.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>364</th>\n <td>Hyundai</td>\n <td>Sonata LX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>20339.0</td>\n <td>18380.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>170.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3217.0</td>\n <td>106.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>365</th>\n <td>Infiniti</td>\n <td>G35 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>28495.0</td>\n <td>26157.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>260.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3336.0</td>\n <td>112.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>366</th>\n <td>Infiniti</td>\n <td>M45 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>42845.0</td>\n <td>38792.0</td>\n <td>4.5</td>\n <td>8.0</td>\n <td>340.0</td>\n <td>17.0</td>\n <td>23.0</td>\n <td>3851.0</td>\n <td>110.0</td>\n <td>197.0</td>\n </tr>\n <tr>\n <th>367</th>\n <td>Isuzu</td>\n <td>Ascender S</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>31849.0</td>\n <td>29977.0</td>\n <td>4.2</td>\n <td>6.0</td>\n <td>275.0</td>\n <td>15.0</td>\n <td>20.0</td>\n <td>4967.0</td>\n <td>129.0</td>\n <td>208.0</td>\n </tr>\n <tr>\n <th>368</th>\n 
<td>Jaguar</td>\n <td>S-Type 3.0 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>43895.0</td>\n <td>40004.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>235.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3777.0</td>\n <td>115.0</td>\n <td>192.0</td>\n </tr>\n <tr>\n <th>369</th>\n <td>Jaguar</td>\n <td>XJ8 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>59995.0</td>\n <td>54656.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>294.0</td>\n <td>18.0</td>\n <td>28.0</td>\n <td>3803.0</td>\n <td>119.0</td>\n <td>200.0</td>\n </tr>\n <tr>\n <th>370</th>\n <td>Jaguar</td>\n <td>XKR coupe 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>81995.0</td>\n <td>74676.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>390.0</td>\n <td>16.0</td>\n <td>23.0</td>\n <td>3865.0</td>\n <td>102.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>371</th>\n <td>Jeep</td>\n <td>Wrangler Sahara convertible 2dr</td>\n <td>SUV</td>\n <td>USA</td>\n <td>All</td>\n <td>25520.0</td>\n <td>23275.0</td>\n <td>4.0</td>\n <td>6.0</td>\n <td>190.0</td>\n <td>16.0</td>\n <td>19.0</td>\n <td>3575.0</td>\n <td>93.0</td>\n <td>150.0</td>\n </tr>\n <tr>\n <th>372</th>\n <td>Kia</td>\n <td>Rio 4dr auto</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>11155.0</td>\n <td>10705.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>104.0</td>\n <td>25.0</td>\n <td>32.0</td>\n <td>2458.0</td>\n <td>95.0</td>\n <td>167.0</td>\n </tr>\n <tr>\n <th>373</th>\n <td>Kia</td>\n <td>Optima LX V6 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>18435.0</td>\n <td>16850.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>170.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3279.0</td>\n <td>106.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>374</th>\n <td>Land Rover</td>\n <td>Range Rover HSE</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>72250.0</td>\n <td>65807.0</td>\n <td>4.4</td>\n <td>8.0</td>\n <td>282.0</td>\n <td>12.0</td>\n <td>16.0</td>\n <td>5379.0</td>\n 
<td>113.0</td>\n <td>195.0</td>\n </tr>\n <tr>\n <th>375</th>\n <td>Lexus</td>\n <td>LX 470</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>64800.0</td>\n <td>56455.0</td>\n <td>4.7</td>\n <td>8.0</td>\n <td>235.0</td>\n <td>13.0</td>\n <td>17.0</td>\n <td>5590.0</td>\n <td>112.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>376</th>\n <td>Lexus</td>\n <td>IS 300 4dr auto</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>32415.0</td>\n <td>28611.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>3285.0</td>\n <td>105.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>377</th>\n <td>Lexus</td>\n <td>SC 430 convertible 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>63200.0</td>\n <td>55063.0</td>\n <td>4.3</td>\n <td>8.0</td>\n <td>300.0</td>\n <td>18.0</td>\n <td>23.0</td>\n <td>3840.0</td>\n <td>103.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>378</th>\n <td>Lincoln</td>\n <td>LS V6 Luxury 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>32495.0</td>\n <td>29969.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>232.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3681.0</td>\n <td>115.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>379</th>\n <td>Lincoln</td>\n <td>Town Car Signature 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>41815.0</td>\n <td>38418.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>239.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4369.0</td>\n <td>118.0</td>\n <td>215.0</td>\n </tr>\n <tr>\n <th>380</th>\n <td>MINI</td>\n <td>Cooper S</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>19999.0</td>\n <td>18137.0</td>\n <td>1.6</td>\n <td>4.0</td>\n <td>163.0</td>\n <td>25.0</td>\n <td>34.0</td>\n <td>2678.0</td>\n <td>97.0</td>\n <td>144.0</td>\n </tr>\n <tr>\n <th>381</th>\n <td>Mazda</td>\n <td>Mazda6 i 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19270.0</td>\n <td>17817.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>160.0</td>\n 
<td>24.0</td>\n <td>32.0</td>\n <td>3042.0</td>\n <td>105.0</td>\n <td>187.0</td>\n </tr>\n <tr>\n <th>382</th>\n <td>Mazda</td>\n <td>RX-8 4dr automatic</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>25700.0</td>\n <td>23794.0</td>\n <td>1.3</td>\n <td>NaN</td>\n <td>197.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>3053.0</td>\n <td>106.0</td>\n <td>174.0</td>\n </tr>\n <tr>\n <th>383</th>\n <td>Mercedes-Benz</td>\n <td>G500</td>\n <td>SUV</td>\n <td>Europe</td>\n <td>All</td>\n <td>76870.0</td>\n <td>71540.0</td>\n <td>5.0</td>\n <td>8.0</td>\n <td>292.0</td>\n <td>13.0</td>\n <td>14.0</td>\n <td>5423.0</td>\n <td>112.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>384</th>\n <td>Mercedes-Benz</td>\n <td>C240 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>32280.0</td>\n <td>30071.0</td>\n <td>2.6</td>\n <td>6.0</td>\n <td>168.0</td>\n <td>20.0</td>\n <td>25.0</td>\n <td>3360.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>385</th>\n <td>Mercedes-Benz</td>\n <td>C320 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>All</td>\n <td>38830.0</td>\n <td>36162.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3450.0</td>\n <td>107.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>386</th>\n <td>Mercedes-Benz</td>\n <td>CLK320 coupe 2dr (convertible)</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>45707.0</td>\n <td>41966.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3770.0</td>\n <td>107.0</td>\n <td>183.0</td>\n </tr>\n <tr>\n <th>387</th>\n <td>Mercedes-Benz</td>\n <td>S430 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>74320.0</td>\n <td>69168.0</td>\n <td>4.3</td>\n <td>8.0</td>\n <td>275.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>4160.0</td>\n <td>122.0</td>\n <td>203.0</td>\n </tr>\n <tr>\n <th>388</th>\n <td>Mercedes-Benz</td>\n <td>SL600 convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n 
<td>Rear</td>\n <td>126670.0</td>\n <td>117854.0</td>\n <td>5.5</td>\n <td>12.0</td>\n <td>493.0</td>\n <td>13.0</td>\n <td>19.0</td>\n <td>4429.0</td>\n <td>101.0</td>\n <td>179.0</td>\n </tr>\n <tr>\n <th>389</th>\n <td>Mercedes-Benz</td>\n <td>E320</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>50670.0</td>\n <td>47174.0</td>\n <td>3.2</td>\n <td>6.0</td>\n <td>221.0</td>\n <td>19.0</td>\n <td>27.0</td>\n <td>3966.0</td>\n <td>112.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>390</th>\n <td>Mercury</td>\n <td>Grand Marquis GS 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>24695.0</td>\n <td>23217.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>224.0</td>\n <td>17.0</td>\n <td>25.0</td>\n <td>4052.0</td>\n <td>115.0</td>\n <td>212.0</td>\n </tr>\n <tr>\n <th>391</th>\n <td>Mercury</td>\n <td>Marauder 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Rear</td>\n <td>34495.0</td>\n <td>31558.0</td>\n <td>4.6</td>\n <td>8.0</td>\n <td>302.0</td>\n <td>17.0</td>\n <td>23.0</td>\n <td>4195.0</td>\n <td>115.0</td>\n <td>212.0</td>\n </tr>\n <tr>\n <th>392</th>\n <td>Mitsubishi</td>\n <td>Montero XLS</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>All</td>\n <td>33112.0</td>\n <td>30763.0</td>\n <td>3.8</td>\n <td>6.0</td>\n <td>215.0</td>\n <td>15.0</td>\n <td>19.0</td>\n <td>4718.0</td>\n <td>110.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>393</th>\n <td>Mitsubishi</td>\n <td>Galant ES 2.4L 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19312.0</td>\n <td>17957.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>160.0</td>\n <td>23.0</td>\n <td>30.0</td>\n <td>3351.0</td>\n <td>108.0</td>\n <td>191.0</td>\n </tr>\n <tr>\n <th>394</th>\n <td>Mitsubishi</td>\n <td>Eclipse GTS 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Front</td>\n <td>25092.0</td>\n <td>23456.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>210.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3241.0</td>\n <td>101.0</td>\n <td>177.0</td>\n </tr>\n <tr>\n <th>395</th>\n 
<td>Nissan</td>\n <td>Pathfinder Armada SE</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>33840.0</td>\n <td>30815.0</td>\n <td>5.6</td>\n <td>8.0</td>\n <td>305.0</td>\n <td>13.0</td>\n <td>19.0</td>\n <td>5013.0</td>\n <td>123.0</td>\n <td>207.0</td>\n </tr>\n <tr>\n <th>396</th>\n <td>Nissan</td>\n <td>Sentra 1.8 S 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>14740.0</td>\n <td>13747.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>126.0</td>\n <td>28.0</td>\n <td>35.0</td>\n <td>2581.0</td>\n <td>100.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>397</th>\n <td>Nissan</td>\n <td>Maxima SE 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>27490.0</td>\n <td>25182.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>265.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3473.0</td>\n <td>111.0</td>\n <td>194.0</td>\n </tr>\n <tr>\n <th>398</th>\n <td>Nissan</td>\n <td>350Z coupe 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>26910.0</td>\n <td>25203.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>287.0</td>\n <td>20.0</td>\n <td>26.0</td>\n <td>3188.0</td>\n <td>104.0</td>\n <td>169.0</td>\n </tr>\n <tr>\n <th>399</th>\n <td>Nissan</td>\n <td>Murano SL</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>Rear</td>\n <td>28739.0</td>\n <td>27300.0</td>\n <td>3.5</td>\n <td>6.0</td>\n <td>245.0</td>\n <td>20.0</td>\n <td>25.0</td>\n <td>3801.0</td>\n <td>111.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>400</th>\n <td>Pontiac</td>\n <td>Aztekt</td>\n <td>SUV</td>\n <td>USA</td>\n <td>Front</td>\n <td>21595.0</td>\n <td>19810.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>185.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3779.0</td>\n <td>108.0</td>\n <td>182.0</td>\n </tr>\n <tr>\n <th>401</th>\n <td>Pontiac</td>\n <td>Sunfire 1SC 2dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>17735.0</td>\n <td>16369.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>24.0</td>\n <td>33.0</td>\n <td>2771.0</td>\n <td>104.0</td>\n 
<td>182.0</td>\n </tr>\n <tr>\n <th>402</th>\n <td>Pontiac</td>\n <td>Montana EWB</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>All</td>\n <td>31370.0</td>\n <td>28454.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>185.0</td>\n <td>18.0</td>\n <td>24.0</td>\n <td>4431.0</td>\n <td>121.0</td>\n <td>201.0</td>\n </tr>\n <tr>\n <th>403</th>\n <td>Porsche</td>\n <td>911 Carrera convertible 2dr (coupe)</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>79165.0</td>\n <td>69229.0</td>\n <td>3.6</td>\n <td>6.0</td>\n <td>315.0</td>\n <td>18.0</td>\n <td>26.0</td>\n <td>3135.0</td>\n <td>93.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>404</th>\n <td>Porsche</td>\n <td>Boxster convertible 2dr</td>\n <td>Sports</td>\n <td>Europe</td>\n <td>Rear</td>\n <td>43365.0</td>\n <td>37886.0</td>\n <td>2.7</td>\n <td>6.0</td>\n <td>228.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>2811.0</td>\n <td>95.0</td>\n <td>170.0</td>\n </tr>\n <tr>\n <th>405</th>\n <td>Saab</td>\n <td>9-5 Arc 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>35105.0</td>\n <td>33011.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>220.0</td>\n <td>21.0</td>\n <td>29.0</td>\n <td>3470.0</td>\n <td>106.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>406</th>\n <td>Saab</td>\n <td>9-5 Aero</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>Front</td>\n <td>40845.0</td>\n <td>38376.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>250.0</td>\n <td>19.0</td>\n <td>29.0</td>\n <td>3620.0</td>\n <td>106.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>407</th>\n <td>Saturn</td>\n <td>lon3 4dr</td>\n <td>Sedan</td>\n <td>USA</td>\n <td>Front</td>\n <td>15825.0</td>\n <td>14811.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n <td>26.0</td>\n <td>35.0</td>\n <td>2692.0</td>\n <td>103.0</td>\n <td>185.0</td>\n </tr>\n <tr>\n <th>408</th>\n <td>Saturn</td>\n <td>L300 2</td>\n <td>Wagon</td>\n <td>USA</td>\n <td>Front</td>\n <td>23560.0</td>\n <td>21779.0</td>\n <td>2.2</td>\n <td>4.0</td>\n <td>140.0</td>\n 
<td>24.0</td>\n <td>34.0</td>\n <td>3109.0</td>\n <td>107.0</td>\n <td>190.0</td>\n </tr>\n <tr>\n <th>409</th>\n <td>Subaru</td>\n <td>Legacy L 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>All</td>\n <td>20445.0</td>\n <td>18713.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3285.0</td>\n <td>104.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>410</th>\n <td>Subaru</td>\n <td>Outback H-6 VDC 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>All</td>\n <td>31545.0</td>\n <td>28603.0</td>\n <td>3.0</td>\n <td>6.0</td>\n <td>212.0</td>\n <td>19.0</td>\n <td>26.0</td>\n <td>3630.0</td>\n <td>104.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>411</th>\n <td>Subaru</td>\n <td>Forester X</td>\n <td>Wagon</td>\n <td>Asia</td>\n <td>All</td>\n <td>21445.0</td>\n <td>19646.0</td>\n <td>2.5</td>\n <td>4.0</td>\n <td>165.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3090.0</td>\n <td>99.0</td>\n <td>175.0</td>\n </tr>\n <tr>\n <th>412</th>\n <td>Suzuki</td>\n <td>Aeno S 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>12884.0</td>\n <td>12719.0</td>\n <td>2.3</td>\n <td>4.0</td>\n <td>155.0</td>\n <td>25.0</td>\n <td>31.0</td>\n <td>2676.0</td>\n <td>98.0</td>\n <td>171.0</td>\n </tr>\n <tr>\n <th>413</th>\n <td>Suzuki</td>\n <td>Verona LX 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>17262.0</td>\n <td>17053.0</td>\n <td>2.5</td>\n <td>6.0</td>\n <td>155.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3380.0</td>\n <td>106.0</td>\n <td>188.0</td>\n </tr>\n <tr>\n <th>414</th>\n <td>Toyota</td>\n <td>4Runner SR5 V6</td>\n <td>SUV</td>\n <td>Asia</td>\n <td>Front</td>\n <td>27710.0</td>\n <td>24801.0</td>\n <td>4.0</td>\n <td>6.0</td>\n <td>245.0</td>\n <td>18.0</td>\n <td>21.0</td>\n <td>4035.0</td>\n <td>110.0</td>\n <td>189.0</td>\n </tr>\n <tr>\n <th>415</th>\n <td>Toyota</td>\n <td>Corolla CE 4dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>14085.0</td>\n <td>13065.0</td>\n 
<td>1.8</td>\n <td>4.0</td>\n <td>130.0</td>\n <td>32.0</td>\n <td>40.0</td>\n <td>2502.0</td>\n <td>102.0</td>\n <td>178.0</td>\n </tr>\n <tr>\n <th>416</th>\n <td>Toyota</td>\n <td>Echo 2dr auto</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>11560.0</td>\n <td>10896.0</td>\n <td>1.5</td>\n <td>4.0</td>\n <td>108.0</td>\n <td>33.0</td>\n <td>39.0</td>\n <td>2085.0</td>\n <td>93.0</td>\n <td>163.0</td>\n </tr>\n <tr>\n <th>417</th>\n <td>Toyota</td>\n <td>Camry Solara SE 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>19635.0</td>\n <td>17722.0</td>\n <td>2.4</td>\n <td>4.0</td>\n <td>157.0</td>\n <td>24.0</td>\n <td>33.0</td>\n <td>3175.0</td>\n <td>107.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>418</th>\n <td>Toyota</td>\n <td>Camry Solara SLE V6 2dr</td>\n <td>Sedan</td>\n <td>Asia</td>\n <td>Front</td>\n <td>26510.0</td>\n <td>23908.0</td>\n <td>3.3</td>\n <td>6.0</td>\n <td>225.0</td>\n <td>20.0</td>\n <td>29.0</td>\n <td>3439.0</td>\n <td>107.0</td>\n <td>193.0</td>\n </tr>\n <tr>\n <th>419</th>\n <td>Toyota</td>\n <td>Celica GT-S 2dr</td>\n <td>Sports</td>\n <td>Asia</td>\n <td>Front</td>\n <td>22570.0</td>\n <td>20363.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>180.0</td>\n <td>24.0</td>\n <td>33.0</td>\n <td>2500.0</td>\n <td>102.0</td>\n <td>171.0</td>\n </tr>\n <tr>\n <th>420</th>\n <td>Toyota</td>\n <td>Tundra Access Cab V6 SR5</td>\n <td>Truck</td>\n <td>Asia</td>\n <td>All</td>\n <td>25935.0</td>\n <td>23520.0</td>\n <td>3.4</td>\n <td>6.0</td>\n <td>190.0</td>\n <td>14.0</td>\n <td>17.0</td>\n <td>4435.0</td>\n <td>128.0</td>\n <td>218.0</td>\n </tr>\n <tr>\n <th>421</th>\n <td>Volkswagen</td>\n <td>GTI 1.8T 2dr hatch</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>19825.0</td>\n <td>18109.0</td>\n <td>1.8</td>\n <td>4.0</td>\n <td>180.0</td>\n <td>24.0</td>\n <td>31.0</td>\n <td>2934.0</td>\n <td>99.0</td>\n <td>168.0</td>\n </tr>\n <tr>\n <th>422</th>\n <td>Volkswagen</td>\n <td>New Beetle GLS 
convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>23215.0</td>\n <td>21689.0</td>\n <td>2.0</td>\n <td>4.0</td>\n <td>115.0</td>\n <td>24.0</td>\n <td>30.0</td>\n <td>3082.0</td>\n <td>99.0</td>\n <td>161.0</td>\n </tr>\n <tr>\n <th>423</th>\n <td>Volkswagen</td>\n <td>Phaeton 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>65000.0</td>\n <td>59912.0</td>\n <td>4.2</td>\n <td>8.0</td>\n <td>335.0</td>\n <td>16.0</td>\n <td>22.0</td>\n <td>5194.0</td>\n <td>118.0</td>\n <td>204.0</td>\n </tr>\n <tr>\n <th>424</th>\n <td>Volkswagen</td>\n <td>Passat W8</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>Front</td>\n <td>40235.0</td>\n <td>36956.0</td>\n <td>4.0</td>\n <td>8.0</td>\n <td>270.0</td>\n <td>18.0</td>\n <td>25.0</td>\n <td>4067.0</td>\n <td>106.0</td>\n <td>184.0</td>\n </tr>\n <tr>\n <th>425</th>\n <td>Volvo</td>\n <td>S60 T5 4dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>34845.0</td>\n <td>32902.0</td>\n <td>2.3</td>\n <td>5.0</td>\n <td>247.0</td>\n <td>20.0</td>\n <td>28.0</td>\n <td>3766.0</td>\n <td>107.0</td>\n <td>180.0</td>\n </tr>\n <tr>\n <th>426</th>\n <td>Volvo</td>\n <td>C70 LPT convertible 2dr</td>\n <td>Sedan</td>\n <td>Europe</td>\n <td>Front</td>\n <td>40565.0</td>\n <td>38203.0</td>\n <td>2.4</td>\n <td>5.0</td>\n <td>197.0</td>\n <td>21.0</td>\n <td>28.0</td>\n <td>3450.0</td>\n <td>105.0</td>\n <td>186.0</td>\n </tr>\n <tr>\n <th>427</th>\n <td>Volvo</td>\n <td>XC70</td>\n <td>Wagon</td>\n <td>Europe</td>\n <td>All</td>\n <td>35145.0</td>\n <td>33112.0</td>\n <td>2.5</td>\n <td>5.0</td>\n <td>208.0</td>\n <td>20.0</td>\n <td>27.0</td>\n <td>3823.0</td>\n <td>109.0</td>\n <td>186.0</td>\n </tr>\n </tbody>\n</table>\n" ], [ "print(tbl.to_latex())", "\\begin{tabular}{llllllrrrrrrrrrr}\n\\toprule\n{} & Make & Model & Type & Origin & DriveTrain & MSRP & Invoice & EngineSize & Cylinders & Horsepower & MPG\\_City & MPG\\_Highway & Weight & Wheelbase & Length \\\\\n\\midrule\n0 & 
Acura & MDX & SUV & Asia & All & 36945.0 & 33337.0 & 3.5 & 6.0 & 265.0 & 17.0 & 23.0 & 4451.0 & 106.0 & 189.0 \\\\\n1 & Acura & 3.5 RL 4dr & Sedan & Asia & Front & 43755.0 & 39014.0 & 3.5 & 6.0 & 225.0 & 18.0 & 24.0 & 3880.0 & 115.0 & 197.0 \\\\\n2 & Audi & A41.8T convertible 2dr & Sedan & Europe & Front & 35940.0 & 32506.0 & 1.8 & 4.0 & 170.0 & 23.0 & 30.0 & 3638.0 & 105.0 & 180.0 \\\\\n3 & Audi & A6 3.0 4dr & Sedan & Europe & Front & 36640.0 & 33129.0 & 3.0 & 6.0 & 220.0 & 20.0 & 27.0 & 3561.0 & 109.0 & 192.0 \\\\\n4 & Audi & A6 2.7 Turbo Quattro 4dr & Sedan & Europe & All & 42840.0 & 38840.0 & 2.7 & 6.0 & 250.0 & 18.0 & 25.0 & 3836.0 & 109.0 & 192.0 \\\\\n5 & Audi & RS 6 4dr & Sports & Europe & Front & 84600.0 & 76417.0 & 4.2 & 8.0 & 450.0 & 15.0 & 22.0 & 4024.0 & 109.0 & 191.0 \\\\\n6 & Audi & A6 3.0 Avant Quattro & Wagon & Europe & All & 40840.0 & 37060.0 & 3.0 & 6.0 & 220.0 & 18.0 & 25.0 & 4035.0 & 109.0 & 192.0 \\\\\n7 & BMW & 325i 4dr & Sedan & Europe & Rear & 28495.0 & 26155.0 & 2.5 & 6.0 & 184.0 & 20.0 & 29.0 & 3219.0 & 107.0 & 176.0 \\\\\n8 & BMW & 330i 4dr & Sedan & Europe & Rear & 35495.0 & 32525.0 & 3.0 & 6.0 & 225.0 & 20.0 & 30.0 & 3285.0 & 107.0 & 176.0 \\\\\n9 & BMW & 330Ci convertible 2dr & Sedan & Europe & Rear & 44295.0 & 40530.0 & 3.0 & 6.0 & 225.0 & 19.0 & 28.0 & 3616.0 & 107.0 & 177.0 \\\\\n10 & BMW & 745Li 4dr & Sedan & Europe & Rear & 73195.0 & 66830.0 & 4.4 & 8.0 & 325.0 & 18.0 & 26.0 & 4464.0 & 123.0 & 204.0 \\\\\n11 & BMW & Z4 convertible 3.0i 2dr & Sports & Europe & Rear & 41045.0 & 37575.0 & 3.0 & 6.0 & 225.0 & 21.0 & 29.0 & 2998.0 & 98.0 & 161.0 \\\\\n12 & Buick & Century Custom 4dr & Sedan & USA & Front & 22180.0 & 20351.0 & 3.1 & 6.0 & 175.0 & 20.0 & 30.0 & 3353.0 & 109.0 & 195.0 \\\\\n13 & Buick & LeSabre Limited 4dr & Sedan & USA & Front & 32245.0 & 29566.0 & 3.8 & 6.0 & 205.0 & 20.0 & 29.0 & 3591.0 & 112.0 & 200.0 \\\\\n14 & Cadillac & SRX V8 & SUV & USA & Front & 46995.0 & 43523.0 & 4.6 & 8.0 & 320.0 & 16.0 & 21.0 & 4302.0 & 
116.0 & 195.0 \\\\\n15 & Cadillac & Seville SLS 4dr & Sedan & USA & Front & 47955.0 & 43841.0 & 4.6 & 8.0 & 275.0 & 18.0 & 26.0 & 3992.0 & 112.0 & 201.0 \\\\\n16 & Chevrolet & Tahoe LT & SUV & USA & All & 41465.0 & 36287.0 & 5.3 & 8.0 & 295.0 & 14.0 & 18.0 & 5050.0 & 116.0 & 197.0 \\\\\n17 & Chevrolet & Aveo LS 4dr hatch & Sedan & USA & Front & 12585.0 & 11802.0 & 1.6 & 4.0 & 103.0 & 28.0 & 34.0 & 2348.0 & 98.0 & 153.0 \\\\\n18 & Chevrolet & Impala 4dr & Sedan & USA & Front & 21900.0 & 20095.0 & 3.4 & 6.0 & 180.0 & 21.0 & 32.0 & 3465.0 & 111.0 & 200.0 \\\\\n19 & Chevrolet & Impala LS 4dr & Sedan & USA & Front & 25000.0 & 22931.0 & 3.8 & 6.0 & 200.0 & 20.0 & 30.0 & 3476.0 & 111.0 & 200.0 \\\\\n20 & Chevrolet & Astro & Sedan & USA & All & 26395.0 & 23954.0 & 4.3 & 6.0 & 190.0 & 14.0 & 17.0 & 4605.0 & 111.0 & 190.0 \\\\\n21 & Chevrolet & Avalanche 1500 & Truck & USA & All & 36100.0 & 31689.0 & 5.3 & 8.0 & 295.0 & 14.0 & 18.0 & 5678.0 & 130.0 & 222.0 \\\\\n22 & Chevrolet & SSR & Truck & USA & Rear & 41995.0 & 39306.0 & 5.3 & 8.0 & 300.0 & 16.0 & 19.0 & 4760.0 & 116.0 & 191.0 \\\\\n23 & Chrysler & Sebring 4dr & Sedan & USA & Front & 19090.0 & 17805.0 & 2.4 & 4.0 & 150.0 & 22.0 & 30.0 & 3173.0 & 108.0 & 191.0 \\\\\n24 & Chrysler & Concorde LXi 4dr & Sedan & USA & Front & 26860.0 & 24909.0 & 3.5 & 6.0 & 232.0 & 19.0 & 27.0 & 3548.0 & 113.0 & 208.0 \\\\\n25 & Chrysler & Sebring Limited convertible 2dr & Sedan & USA & Front & 30950.0 & 28613.0 & 2.7 & 6.0 & 200.0 & 21.0 & 28.0 & 3448.0 & 106.0 & 194.0 \\\\\n26 & Chrysler & Pacifica & Wagon & USA & Rear & 31230.0 & 28725.0 & 3.5 & 6.0 & 250.0 & 17.0 & 23.0 & 4675.0 & 116.0 & 199.0 \\\\\n27 & Dodge & Intrepid SE 4dr & Sedan & USA & Front & 22035.0 & 20502.0 & 2.7 & 6.0 & 200.0 & 21.0 & 29.0 & 3469.0 & 113.0 & 204.0 \\\\\n28 & Dodge & Caravan SE & Sedan & USA & Front & 21795.0 & 20508.0 & 2.4 & 4.0 & 150.0 & 20.0 & 26.0 & 3862.0 & 113.0 & 189.0 \\\\\n29 & Dodge & Dakota Club Cab & Truck & USA & Rear & 20300.0 & 18670.0 & 3.7 & 
6.0 & 210.0 & 16.0 & 22.0 & 3829.0 & 131.0 & 219.0 \\\\\n30 & Ford & Explorer XLT V6 & SUV & USA & All & 29670.0 & 26983.0 & 4.0 & 6.0 & 210.0 & 15.0 & 20.0 & 4463.0 & 114.0 & 190.0 \\\\\n31 & Ford & Focus SE 4dr & Sedan & USA & Front & 15460.0 & 14496.0 & 2.0 & 4.0 & 130.0 & 26.0 & 33.0 & 2606.0 & 103.0 & 168.0 \\\\\n32 & Ford & Taurus SES Duratec 4dr & Sedan & USA & Front & 22735.0 & 20857.0 & 3.0 & 6.0 & 201.0 & 19.0 & 26.0 & 3313.0 & 109.0 & 198.0 \\\\\n33 & Ford & Freestar SE & Sedan & USA & Front & 26930.0 & 24498.0 & 3.9 & 6.0 & 193.0 & 17.0 & 23.0 & 4275.0 & 121.0 & 201.0 \\\\\n34 & Ford & F-150 Regular Cab XL & Truck & USA & Rear & 22010.0 & 19490.0 & 4.6 & 8.0 & 231.0 & 15.0 & 19.0 & 4788.0 & 126.0 & 211.0 \\\\\n35 & Ford & Taurus SE & Wagon & USA & Front & 22290.0 & 20457.0 & 3.0 & 6.0 & 155.0 & 19.0 & 26.0 & 3497.0 & 109.0 & 198.0 \\\\\n36 & GMC & Safari SLE & Sedan & USA & Rear & 25640.0 & 23215.0 & 4.3 & 6.0 & 190.0 & 16.0 & 20.0 & 4309.0 & 111.0 & 190.0 \\\\\n37 & GMC & Sonoma Crew Cab & Truck & USA & All & 25395.0 & 23043.0 & 4.3 & 6.0 & 190.0 & 15.0 & 19.0 & 4083.0 & 123.0 & 208.0 \\\\\n38 & Honda & CR-V LX & SUV & Asia & All & 19860.0 & 18419.0 & 2.4 & 4.0 & 160.0 & 21.0 & 25.0 & 3258.0 & 103.0 & 179.0 \\\\\n39 & Honda & Civic LX 4dr & Sedan & Asia & Front & 15850.0 & 14531.0 & 1.7 & 4.0 & 115.0 & 32.0 & 38.0 & 2513.0 & 103.0 & 175.0 \\\\\n40 & Honda & Civic Si 2dr hatch & Sedan & Asia & Front & 19490.0 & 17849.0 & 2.0 & 4.0 & 160.0 & 26.0 & 30.0 & 2782.0 & 101.0 & 166.0 \\\\\n41 & Honda & Odyssey EX & Sedan & Asia & Front & 27450.0 & 24744.0 & 3.5 & 6.0 & 240.0 & 18.0 & 25.0 & 4365.0 & 118.0 & 201.0 \\\\\n42 & Hyundai & Accent 2dr hatch & Sedan & Asia & Front & 10539.0 & 10107.0 & 1.6 & 4.0 & 103.0 & 29.0 & 33.0 & 2255.0 & 96.0 & 167.0 \\\\\n43 & Hyundai & Elantra GT 4dr & Sedan & Asia & Front & 15389.0 & 14207.0 & 2.0 & 4.0 & 138.0 & 26.0 & 34.0 & 2635.0 & 103.0 & 178.0 \\\\\n44 & Hyundai & XG350 4dr & Sedan & Asia & Front & 24589.0 & 22055.0 & 
3.5 & 6.0 & 194.0 & 17.0 & 26.0 & 3651.0 & 108.0 & 192.0 \\\\\n45 & Infiniti & G35 Sport Coupe 2dr & Sedan & Asia & Rear & 29795.0 & 27536.0 & 3.5 & 6.0 & 280.0 & 18.0 & 26.0 & 3416.0 & 112.0 & 182.0 \\\\\n46 & Infiniti & Q45 Luxury 4dr & Sedan & Asia & Rear & 52545.0 & 47575.0 & 4.5 & 8.0 & 340.0 & 17.0 & 23.0 & 3977.0 & 113.0 & 200.0 \\\\\n47 & Isuzu & Rodeo S & SUV & Asia & Front & 20449.0 & 19261.0 & 3.2 & 6.0 & 193.0 & 17.0 & 21.0 & 3836.0 & 106.0 & 178.0 \\\\\n48 & Jaguar & S-Type 4.2 4dr & Sedan & Europe & Rear & 49995.0 & 45556.0 & 4.2 & 8.0 & 294.0 & 18.0 & 28.0 & 3874.0 & 115.0 & 192.0 \\\\\n49 & Jaguar & XJR 4dr & Sedan & Europe & Rear & 74995.0 & 68306.0 & 4.2 & 8.0 & 390.0 & 17.0 & 24.0 & 3948.0 & 119.0 & 200.0 \\\\\n50 & Jaguar & XKR convertible 2dr & Sports & Europe & Rear & 86995.0 & 79226.0 & 4.2 & 8.0 & 390.0 & 16.0 & 23.0 & 4042.0 & 102.0 & 187.0 \\\\\n51 & Kia & Sorento LX & SUV & Asia & Front & 19635.0 & 18630.0 & 3.5 & 6.0 & 192.0 & 16.0 & 19.0 & 4112.0 & 107.0 & 180.0 \\\\\n52 & Kia & Spectra 4dr & Sedan & Asia & Front & 12360.0 & 11630.0 & 1.8 & 4.0 & 124.0 & 24.0 & 32.0 & 2661.0 & 101.0 & 178.0 \\\\\n53 & Kia & Amanti 4dr & Sedan & Asia & Front & 26000.0 & 23764.0 & 3.5 & 6.0 & 195.0 & 17.0 & 25.0 & 4021.0 & 110.0 & 196.0 \\\\\n54 & Land Rover & Discovery SE & SUV & Europe & All & 39250.0 & 35777.0 & 4.6 & 8.0 & 217.0 & 12.0 & 16.0 & 4576.0 & 100.0 & 185.0 \\\\\n55 & Lexus & RX 330 & SUV & Asia & All & 39195.0 & 34576.0 & 3.3 & 6.0 & 230.0 & 18.0 & 24.0 & 4065.0 & 107.0 & 186.0 \\\\\n56 & Lexus & GS 300 4dr & Sedan & Asia & Rear & 41010.0 & 36196.0 & 3.0 & 6.0 & 220.0 & 18.0 & 25.0 & 3649.0 & 110.0 & 189.0 \\\\\n57 & Lexus & IS 300 SportCross & Wagon & Asia & Rear & 32455.0 & 28647.0 & 3.0 & 6.0 & 215.0 & 18.0 & 24.0 & 3410.0 & 105.0 & 177.0 \\\\\n58 & Lincoln & LS V6 Premium 4dr & Sedan & USA & Rear & 36895.0 & 33929.0 & 3.0 & 6.0 & 232.0 & 20.0 & 26.0 & 3681.0 & 115.0 & 194.0 \\\\\n59 & Lincoln & Town Car Ultimate 4dr & Sedan & USA & Rear 
& 44925.0 & 41217.0 & 4.6 & 8.0 & 239.0 & 17.0 & 25.0 & 4369.0 & 118.0 & 215.0 \\\\\n60 & Mazda & Tribute DX 2.0 & SUV & Asia & All & 21087.0 & 19742.0 & 2.0 & 4.0 & 130.0 & 22.0 & 25.0 & 3091.0 & 103.0 & 173.0 \\\\\n61 & Mazda & MPV ES & Sedan & Asia & Front & 28750.0 & 26600.0 & 3.0 & 6.0 & 200.0 & 18.0 & 25.0 & 3812.0 & 112.0 & 188.0 \\\\\n62 & Mazda & RX-8 4dr manual & Sports & Asia & Rear & 27200.0 & 25179.0 & 1.3 & NaN & 238.0 & 18.0 & 24.0 & 3029.0 & 106.0 & 174.0 \\\\\n63 & Mercedes-Benz & ML500 & SUV & Europe & All & 46470.0 & 43268.0 & 5.0 & 8.0 & 288.0 & 14.0 & 17.0 & 4874.0 & 111.0 & 183.0 \\\\\n64 & Mercedes-Benz & C240 4dr & Sedan & Europe & All & 33480.0 & 31187.0 & 2.6 & 6.0 & 168.0 & 19.0 & 25.0 & 3360.0 & 107.0 & 178.0 \\\\\n65 & Mercedes-Benz & C32 AMG 4dr & Sedan & Europe & Rear & 52120.0 & 48522.0 & 3.2 & 6.0 & 349.0 & 16.0 & 21.0 & 3540.0 & 107.0 & 178.0 \\\\\n66 & Mercedes-Benz & CLK500 coupe 2dr (convertible) & Sedan & Europe & Rear & 52800.0 & 49104.0 & 5.0 & 8.0 & 302.0 & 17.0 & 22.0 & 3585.0 & 107.0 & 183.0 \\\\\n67 & Mercedes-Benz & S500 4dr & Sedan & Europe & All & 86970.0 & 80939.0 & 5.0 & 8.0 & 302.0 & 16.0 & 24.0 & 4390.0 & 122.0 & 203.0 \\\\\n68 & Mercedes-Benz & SLK230 convertible 2dr & Sports & Europe & Rear & 40320.0 & 37548.0 & 2.3 & 4.0 & 192.0 & 21.0 & 29.0 & 3055.0 & 95.0 & 158.0 \\\\\n69 & Mercedes-Benz & E500 & Wagon & Europe & All & 60670.0 & 56474.0 & 5.0 & 8.0 & 302.0 & 16.0 & 24.0 & 4230.0 & 112.0 & 190.0 \\\\\n70 & Mercury & Grand Marquis LS Premium 4dr & Sedan & USA & Rear & 29595.0 & 27148.0 & 4.6 & 8.0 & 224.0 & 17.0 & 25.0 & 4052.0 & 115.0 & 212.0 \\\\\n71 & Mercury & Monterey Luxury & Sedan & USA & Front & 33995.0 & 30846.0 & 4.2 & 6.0 & 201.0 & 16.0 & 23.0 & 4340.0 & 121.0 & 202.0 \\\\\n72 & Mitsubishi & Outlander LS & SUV & Asia & Front & 18892.0 & 17569.0 & 2.4 & 4.0 & 160.0 & 21.0 & 27.0 & 3240.0 & 103.0 & 179.0 \\\\\n73 & Mitsubishi & Lancer OZ Rally 4dr auto & Sedan & Asia & Front & 17232.0 & 16196.0 & 2.0 & 
4.0 & 120.0 & 25.0 & 31.0 & 2744.0 & 102.0 & 181.0 \\\\\n74 & Mitsubishi & Eclipse Spyder GT convertible 2dr & Sports & Asia & Front & 26992.0 & 25218.0 & 3.0 & 6.0 & 210.0 & 21.0 & 28.0 & 3296.0 & 101.0 & 177.0 \\\\\n75 & Nissan & Pathfinder SE & SUV & Asia & Front & 27339.0 & 25972.0 & 3.5 & 6.0 & 240.0 & 16.0 & 21.0 & 3871.0 & 106.0 & 183.0 \\\\\n76 & Nissan & Altima S 4dr & Sedan & Asia & Front & 19240.0 & 18030.0 & 2.5 & 4.0 & 175.0 & 21.0 & 26.0 & 3039.0 & 110.0 & 192.0 \\\\\n77 & Nissan & Maxima SL 4dr & Sedan & Asia & Front & 29440.0 & 26966.0 & 3.5 & 6.0 & 265.0 & 20.0 & 28.0 & 3476.0 & 111.0 & 194.0 \\\\\n78 & Nissan & 350Z Enthusiast convertible 2dr & Sports & Asia & Rear & 34390.0 & 31845.0 & 3.5 & 6.0 & 287.0 & 20.0 & 26.0 & 3428.0 & 104.0 & 169.0 \\\\\n79 & Oldsmobile & Alero GX 2dr & Sedan & USA & Front & 18825.0 & 17642.0 & 2.2 & 4.0 & 140.0 & 24.0 & 32.0 & 2946.0 & 107.0 & 187.0 \\\\\n80 & Pontiac & Sunfire 1SA 2dr & Sedan & USA & Front & 15495.0 & 14375.0 & 2.2 & 4.0 & 140.0 & 24.0 & 33.0 & 2771.0 & 104.0 & 182.0 \\\\\n81 & Pontiac & Grand Prix GT2 4dr & Sedan & USA & Front & 24295.0 & 22284.0 & 3.8 & 6.0 & 200.0 & 20.0 & 30.0 & 3484.0 & 111.0 & 198.0 \\\\\n82 & Pontiac & GTO 2dr & Sports & USA & Rear & 33500.0 & 30710.0 & 5.7 & 8.0 & 340.0 & 16.0 & 20.0 & 3725.0 & 110.0 & 190.0 \\\\\n83 & Porsche & 911 Carrera 4S coupe 2dr (convert) & Sports & Europe & All & 84165.0 & 72206.0 & 3.6 & 6.0 & 315.0 & 17.0 & 24.0 & 3240.0 & 93.0 & 175.0 \\\\\n84 & Porsche & Boxster S convertible 2dr & Sports & Europe & Rear & 52365.0 & 45766.0 & 3.2 & 6.0 & 258.0 & 18.0 & 26.0 & 2911.0 & 95.0 & 170.0 \\\\\n85 & Saab & 9-5 Aero 4dr & Sedan & Europe & Front & 39465.0 & 37721.0 & 2.3 & 4.0 & 250.0 & 21.0 & 29.0 & 3470.0 & 106.0 & 190.0 \\\\\n86 & Saturn & VUE & SUV & USA & All & 20585.0 & 19238.0 & 2.2 & 4.0 & 143.0 & 21.0 & 26.0 & 3381.0 & 107.0 & 181.0 \\\\\n87 & Saturn & lon2 quad coupe 2dr & Sedan & USA & Front & 14850.0 & 13904.0 & 2.2 & 4.0 & 140.0 & 26.0 & 35.0 & 
2751.0 & 103.0 & 185.0 \\\\\n88 & Scion & xA 4dr hatch & Sedan & Asia & Front & 12965.0 & 12340.0 & 1.5 & 4.0 & 108.0 & 32.0 & 38.0 & 2340.0 & 93.0 & 154.0 \\\\\n89 & Subaru & Legacy GT 4dr & Sedan & Asia & All & 25645.0 & 23336.0 & 2.5 & 4.0 & 165.0 & 21.0 & 28.0 & 3395.0 & 104.0 & 184.0 \\\\\n90 & Subaru & Impreza WRX 4dr & Sports & Asia & All & 25045.0 & 23022.0 & 2.0 & 4.0 & 227.0 & 20.0 & 27.0 & 3085.0 & 99.0 & 174.0 \\\\\n91 & Subaru & Outback & Wagon & Asia & All & 23895.0 & 21773.0 & 2.5 & 4.0 & 165.0 & 21.0 & 28.0 & 3430.0 & 104.0 & 187.0 \\\\\n92 & Suzuki & Aerio LX 4dr & Sedan & Asia & Front & 14500.0 & 14317.0 & 2.3 & 4.0 & 155.0 & 25.0 & 31.0 & 2676.0 & 98.0 & 171.0 \\\\\n93 & Suzuki & Aerio SX & Wagon & Asia & All & 16497.0 & 16291.0 & 2.3 & 4.0 & 155.0 & 24.0 & 29.0 & 2932.0 & 98.0 & 167.0 \\\\\n94 & Toyota & Highlander V6 & SUV & Asia & All & 27930.0 & 24915.0 & 3.3 & 6.0 & 230.0 & 18.0 & 24.0 & 3935.0 & 107.0 & 185.0 \\\\\n95 & Toyota & Corolla S 4dr & Sedan & Asia & Front & 15030.0 & 13650.0 & 1.8 & 4.0 & 130.0 & 32.0 & 40.0 & 2524.0 & 102.0 & 178.0 \\\\\n96 & Toyota & Echo 4dr & Sedan & Asia & Front & 11290.0 & 10642.0 & 1.5 & 4.0 & 108.0 & 35.0 & 43.0 & 2055.0 & 93.0 & 163.0 \\\\\n97 & Toyota & Camry Solara SE V6 2dr & Sedan & Asia & Front & 21965.0 & 19819.0 & 3.3 & 6.0 & 225.0 & 20.0 & 29.0 & 3417.0 & 107.0 & 193.0 \\\\\n98 & Toyota & Avalon XLS 4dr & Sedan & Asia & Front & 30920.0 & 27271.0 & 3.0 & 6.0 & 210.0 & 21.0 & 29.0 & 3439.0 & 107.0 & 192.0 \\\\\n99 & Toyota & MR2 Spyder convertible 2dr & Sports & Asia & Rear & 25130.0 & 22787.0 & 1.8 & 4.0 & 138.0 & 26.0 & 32.0 & 2195.0 & 97.0 & 153.0 \\\\\n100 & Toyota & Matrix XR & Wagon & Asia & Front & 16695.0 & 15156.0 & 1.8 & 4.0 & 130.0 & 29.0 & 36.0 & 2679.0 & 102.0 & 171.0 \\\\\n101 & Volkswagen & Jetta GLS TDI 4dr & Sedan & Europe & Front & 21055.0 & 19638.0 & 1.9 & 4.0 & 100.0 & 38.0 & 46.0 & 3003.0 & 99.0 & 172.0 \\\\\n102 & Volkswagen & Passat GLS 4dr & Sedan & Europe & Front & 23955.0 & 
21898.0 & 1.8 & 4.0 & 170.0 & 22.0 & 31.0 & 3241.0 & 106.0 & 185.0 \\\\\n103 & Volkswagen & Phaeton W12 4dr & Sedan & Europe & Front & 75000.0 & 69130.0 & 6.0 & 12.0 & 420.0 & 12.0 & 19.0 & 5399.0 & 118.0 & 204.0 \\\\\n104 & Volvo & XC90 T6 & SUV & Europe & All & 41250.0 & 38851.0 & 2.9 & 6.0 & 268.0 & 15.0 & 20.0 & 4638.0 & 113.0 & 189.0 \\\\\n105 & Volvo & S60 R 4dr & Sedan & Europe & All & 37560.0 & 35382.0 & 2.5 & 5.0 & 300.0 & 18.0 & 25.0 & 3571.0 & 107.0 & 181.0 \\\\\n106 & Volvo & C70 HPT convertible 2dr & Sedan & Europe & Front & 42565.0 & 40083.0 & 2.3 & 5.0 & 242.0 & 20.0 & 26.0 & 3450.0 & 105.0 & 186.0 \\\\\n107 & Acura & RSX Type S 2dr & Sedan & Asia & Front & 23820.0 & 21761.0 & 2.0 & 4.0 & 200.0 & 24.0 & 31.0 & 2778.0 & 101.0 & 172.0 \\\\\n108 & Acura & 3.5 RL w/Navigation 4dr & Sedan & Asia & Front & 46100.0 & 41100.0 & 3.5 & 6.0 & 225.0 & 18.0 & 24.0 & 3893.0 & 115.0 & 197.0 \\\\\n109 & Audi & A4 3.0 4dr & Sedan & Europe & Front & 31840.0 & 28846.0 & 3.0 & 6.0 & 220.0 & 20.0 & 28.0 & 3462.0 & 104.0 & 179.0 \\\\\n110 & Audi & A6 3.0 Quattro 4dr & Sedan & Europe & All & 39640.0 & 35992.0 & 3.0 & 6.0 & 220.0 & 18.0 & 25.0 & 3880.0 & 109.0 & 192.0 \\\\\n111 & Audi & A6 4.2 Quattro 4dr & Sedan & Europe & All & 49690.0 & 44936.0 & 4.2 & 8.0 & 300.0 & 17.0 & 24.0 & 4024.0 & 109.0 & 193.0 \\\\\n112 & Audi & TT 1.8 convertible 2dr (coupe) & Sports & Europe & Front & 35940.0 & 32512.0 & 1.8 & 4.0 & 180.0 & 20.0 & 28.0 & 3131.0 & 95.0 & 159.0 \\\\\n113 & Audi & S4 Avant Quattro & Wagon & Europe & All & 49090.0 & 44446.0 & 4.2 & 8.0 & 340.0 & 15.0 & 21.0 & 3936.0 & 104.0 & 179.0 \\\\\n114 & BMW & 325Ci 2dr & Sedan & Europe & Rear & 30795.0 & 28245.0 & 2.5 & 6.0 & 184.0 & 20.0 & 29.0 & 3197.0 & 107.0 & 177.0 \\\\\n115 & BMW & 330Ci 2dr & Sedan & Europe & Rear & 36995.0 & 33890.0 & 3.0 & 6.0 & 225.0 & 20.0 & 30.0 & 3285.0 & 107.0 & 176.0 \\\\\n116 & BMW & 530i 4dr & Sedan & Europe & Rear & 44995.0 & 41170.0 & 3.0 & 6.0 & 225.0 & 20.0 & 30.0 & 3472.0 & 114.0 & 
191.0 \\\\\n117 & BMW & M3 coupe 2dr & Sports & Europe & Rear & 48195.0 & 44170.0 & 3.2 & 6.0 & 333.0 & 16.0 & 24.0 & 3415.0 & 108.0 & 177.0 \\\\\n118 & BMW & 325xi Sport & Wagon & Europe & All & 32845.0 & 30110.0 & 2.5 & 6.0 & 184.0 & 19.0 & 26.0 & 3594.0 & 107.0 & 176.0 \\\\\n119 & Buick & LeSabre Custom 4dr & Sedan & USA & Front & 26470.0 & 24282.0 & 3.8 & 6.0 & 205.0 & 20.0 & 29.0 & 3567.0 & 112.0 & 200.0 \\\\\n120 & Buick & Park Avenue 4dr & Sedan & USA & Front & 35545.0 & 32244.0 & 3.8 & 6.0 & 205.0 & 20.0 & 29.0 & 3778.0 & 114.0 & 207.0 \\\\\n121 & Cadillac & CTS VVT 4dr & Sedan & USA & Rear & 30835.0 & 28575.0 & 3.6 & 6.0 & 255.0 & 18.0 & 25.0 & 3694.0 & 113.0 & 190.0 \\\\\n122 & Cadillac & XLR convertible 2dr & Sports & USA & Rear & 76200.0 & 70546.0 & 4.6 & 8.0 & 320.0 & 17.0 & 25.0 & 3647.0 & 106.0 & 178.0 \\\\\n123 & Chevrolet & TrailBlazer LT & SUV & USA & Front & 30295.0 & 27479.0 & 4.2 & 6.0 & 275.0 & 16.0 & 21.0 & 4425.0 & 113.0 & 192.0 \\\\\n124 & Chevrolet & Cavalier 2dr & Sedan & USA & Front & 14610.0 & 13697.0 & 2.2 & 4.0 & 140.0 & 26.0 & 37.0 & 2617.0 & 104.0 & 183.0 \\\\\n125 & Chevrolet & Malibu 4dr & Sedan & USA & Front & 18995.0 & 17434.0 & 2.2 & 4.0 & 145.0 & 24.0 & 34.0 & 3174.0 & 106.0 & 188.0 \\\\\n126 & Chevrolet & Impala SS 4dr & Sedan & USA & Front & 27995.0 & 25672.0 & 3.8 & 6.0 & 240.0 & 18.0 & 28.0 & 3606.0 & 111.0 & 200.0 \\\\\n127 & Chevrolet & Venture LS & Sedan & USA & Front & 27020.0 & 24518.0 & 3.4 & 6.0 & 185.0 & 19.0 & 26.0 & 3699.0 & 112.0 & 187.0 \\\\\n128 & Chevrolet & Colorado Z85 & Truck & USA & All & 18760.0 & 17070.0 & 2.8 & 4.0 & 175.0 & 18.0 & 23.0 & 3623.0 & 111.0 & 192.0 \\\\\n129 & Chevrolet & Malibu Maxx LS & Wagon & USA & Front & 22225.0 & 20394.0 & 3.5 & 6.0 & 200.0 & 22.0 & 30.0 & 3458.0 & 112.0 & 188.0 \\\\\n130 & Chrysler & Sebring Touring 4dr & Sedan & USA & Front & 21840.0 & 20284.0 & 2.7 & 6.0 & 200.0 & 21.0 & 28.0 & 3222.0 & 108.0 & 191.0 \\\\\n131 & Chrysler & PT Cruiser GT 4dr & Sedan & USA & Front 
& 25955.0 & 24172.0 & 2.4 & 4.0 & 220.0 & 21.0 & 27.0 & 3217.0 & 103.0 & 169.0 \\\\\n132 & Chrysler & Town and Country LX & Sedan & USA & Front & 27490.0 & 25371.0 & 3.3 & 6.0 & 180.0 & 19.0 & 26.0 & 4068.0 & 119.0 & 201.0 \\\\\n133 & Dodge & Durango SLT & SUV & USA & All & 32235.0 & 29472.0 & 4.7 & 8.0 & 230.0 & 15.0 & 21.0 & 4987.0 & 119.0 & 201.0 \\\\\n134 & Dodge & Stratus SXT 4dr & Sedan & USA & Front & 18820.0 & 17512.0 & 2.4 & 4.0 & 150.0 & 21.0 & 28.0 & 3182.0 & 108.0 & 191.0 \\\\\n135 & Dodge & Grand Caravan SXT & Sedan & USA & All & 32660.0 & 29812.0 & 3.8 & 6.0 & 215.0 & 18.0 & 25.0 & 4440.0 & 119.0 & 201.0 \\\\\n136 & Dodge & Ram 1500 Regular Cab ST & Truck & USA & Rear & 20215.0 & 18076.0 & 3.7 & 6.0 & 215.0 & 16.0 & 21.0 & 4542.0 & 121.0 & 208.0 \\\\\n137 & Ford & Escape XLS & SUV & USA & All & 22515.0 & 20907.0 & 3.0 & 6.0 & 201.0 & 18.0 & 23.0 & 3346.0 & 103.0 & 173.0 \\\\\n138 & Ford & Focus ZX5 5dr & Sedan & USA & Front & 15580.0 & 14607.0 & 2.0 & 4.0 & 130.0 & 26.0 & 33.0 & 2691.0 & 103.0 & 168.0 \\\\\n139 & Ford & Crown Victoria 4dr & Sedan & USA & Rear & 24345.0 & 22856.0 & 4.6 & 8.0 & 224.0 & 17.0 & 25.0 & 4057.0 & 115.0 & 212.0 \\\\\n140 & Ford & Mustang 2dr (convertible) & Sports & USA & Rear & 18345.0 & 16943.0 & 3.8 & 6.0 & 193.0 & 20.0 & 29.0 & 3290.0 & 101.0 & 183.0 \\\\\n141 & Ford & F-150 Supercab Lariat & Truck & USA & All & 33540.0 & 29405.0 & 5.4 & 8.0 & 300.0 & 14.0 & 18.0 & 5464.0 & 133.0 & 218.0 \\\\\n142 & GMC & Envoy XUV SLE & SUV & USA & Front & 31890.0 & 28922.0 & 4.2 & 6.0 & 275.0 & 15.0 & 19.0 & 4945.0 & 129.0 & 208.0 \\\\\n143 & GMC & Canyon Z85 SL Regular Cab & Truck & USA & Rear & 16530.0 & 14877.0 & 2.8 & 4.0 & 175.0 & 18.0 & 25.0 & 3351.0 & 111.0 & 192.0 \\\\\n144 & Honda & Civic Hybrid 4dr manual (gas/electric) & Hybrid & Asia & Front & 20140.0 & 18451.0 & 1.4 & 4.0 & 93.0 & 46.0 & 51.0 & 2732.0 & 103.0 & 175.0 \\\\\n145 & Honda & Element LX & SUV & Asia & All & 18690.0 & 17334.0 & 2.4 & 4.0 & 160.0 & 21.0 & 24.0 & 
3468.0 & 101.0 & 167.0 \\\\\n146 & Honda & Accord LX 2dr & Sedan & Asia & Front & 19860.0 & 17924.0 & 2.4 & 4.0 & 160.0 & 26.0 & 34.0 & 2994.0 & 105.0 & 188.0 \\\\\n147 & Honda & Accord LX V6 4dr & Sedan & Asia & Front & 23760.0 & 21428.0 & 3.0 & 6.0 & 240.0 & 21.0 & 30.0 & 3349.0 & 108.0 & 190.0 \\\\\n148 & Honda & S2000 convertible 2dr & Sports & Asia & Rear & 33260.0 & 29965.0 & 2.2 & 4.0 & 240.0 & 20.0 & 25.0 & 2835.0 & 95.0 & 162.0 \\\\\n149 & Hyundai & Accent GL 4dr & Sedan & Asia & Front & 11839.0 & 11116.0 & 1.6 & 4.0 & 103.0 & 29.0 & 33.0 & 2290.0 & 96.0 & 167.0 \\\\\n150 & Hyundai & Elantra GT 4dr hatch & Sedan & Asia & Front & 15389.0 & 14207.0 & 2.0 & 4.0 & 138.0 & 26.0 & 34.0 & 2698.0 & 103.0 & 178.0 \\\\\n151 & Hyundai & XG350 L 4dr & Sedan & Asia & Front & 26189.0 & 23486.0 & 3.5 & 6.0 & 194.0 & 17.0 & 26.0 & 3651.0 & 108.0 & 192.0 \\\\\n152 & Infiniti & G35 4dr & Sedan & Asia & All & 32445.0 & 29783.0 & 3.5 & 6.0 & 260.0 & 18.0 & 26.0 & 3677.0 & 112.0 & 187.0 \\\\\n153 & Infiniti & FX35 & Wagon & Asia & Rear & 34895.0 & 31756.0 & 3.5 & 6.0 & 280.0 & 16.0 & 22.0 & 4056.0 & 112.0 & 189.0 \\\\\n154 & Jaguar & X-Type 2.5 4dr & Sedan & Europe & All & 29995.0 & 27355.0 & 2.5 & 6.0 & 192.0 & 18.0 & 26.0 & 3428.0 & 107.0 & 184.0 \\\\\n155 & Jaguar & S-Type R 4dr & Sedan & Europe & Rear & 63120.0 & 57499.0 & 4.2 & 8.0 & 390.0 & 17.0 & 24.0 & 4046.0 & 115.0 & 192.0 \\\\\n156 & Jaguar & XK8 coupe 2dr & Sports & Europe & Rear & 69995.0 & 63756.0 & 4.2 & 8.0 & 294.0 & 18.0 & 26.0 & 3779.0 & 102.0 & 187.0 \\\\\n157 & Jeep & Grand Cherokee Laredo & SUV & USA & Front & 27905.0 & 25686.0 & 4.0 & 6.0 & 195.0 & 16.0 & 21.0 & 3790.0 & 106.0 & 181.0 \\\\\n158 & Kia & Optima LX 4dr & Sedan & Asia & Front & 16040.0 & 14910.0 & 2.4 & 4.0 & 138.0 & 23.0 & 30.0 & 3281.0 & 106.0 & 186.0 \\\\\n159 & Kia & Spectra GS 4dr hatch & Sedan & Asia & Front & 13580.0 & 12830.0 & 1.8 & 4.0 & 124.0 & 24.0 & 32.0 & 2686.0 & 101.0 & 178.0 \\\\\n160 & Kia & Sedona LX & Sedan & Asia & Front 
& 20615.0 & 19400.0 & 3.5 & 6.0 & 195.0 & 16.0 & 22.0 & 4802.0 & 115.0 & 194.0 \\\\\n161 & Land Rover & Freelander SE & SUV & Europe & All & 25995.0 & 23969.0 & 2.5 & 6.0 & 174.0 & 18.0 & 21.0 & 3577.0 & 101.0 & 175.0 \\\\\n162 & Lexus & ES 330 4dr & Sedan & Asia & Front & 32350.0 & 28755.0 & 3.3 & 6.0 & 225.0 & 20.0 & 29.0 & 3460.0 & 107.0 & 191.0 \\\\\n163 & Lexus & GS 430 4dr & Sedan & Asia & Rear & 48450.0 & 42232.0 & 4.3 & 8.0 & 300.0 & 18.0 & 23.0 & 3715.0 & 110.0 & 189.0 \\\\\n164 & Lincoln & Navigator Luxury & SUV & USA & All & 52775.0 & 46360.0 & 5.4 & 8.0 & 300.0 & 13.0 & 18.0 & 5969.0 & 119.0 & 206.0 \\\\\n165 & Lincoln & LS V8 Sport 4dr & Sedan & USA & Rear & 40095.0 & 36809.0 & 3.9 & 8.0 & 280.0 & 17.0 & 24.0 & 3768.0 & 115.0 & 194.0 \\\\\n166 & Lincoln & Town Car Ultimate L 4dr & Sedan & USA & Rear & 50470.0 & 46208.0 & 4.6 & 8.0 & 239.0 & 17.0 & 25.0 & 4474.0 & 124.0 & 221.0 \\\\\n167 & Mazda & Mazda3 i 4dr & Sedan & Asia & Front & 15500.0 & 14525.0 & 2.0 & 4.0 & 148.0 & 26.0 & 34.0 & 2696.0 & 104.0 & 178.0 \\\\\n168 & Mazda & MX-5 Miata convertible 2dr & Sports & Asia & Rear & 22388.0 & 20701.0 & 1.8 & 4.0 & 142.0 & 23.0 & 28.0 & 2387.0 & 89.0 & 156.0 \\\\\n169 & Mazda & B2300 SX Regular Cab & Truck & Asia & Rear & 14840.0 & 14070.0 & 2.3 & 4.0 & 143.0 & 24.0 & 29.0 & 2960.0 & 112.0 & 188.0 \\\\\n170 & Mercedes-Benz & C230 Sport 2dr & Sedan & Europe & Rear & 26060.0 & 24249.0 & 1.8 & 4.0 & 189.0 & 22.0 & 30.0 & 3250.0 & 107.0 & 178.0 \\\\\n171 & Mercedes-Benz & C320 Sport 4dr & Sedan & Europe & Rear & 35920.0 & 33456.0 & 3.2 & 6.0 & 215.0 & 19.0 & 26.0 & 3430.0 & 107.0 & 178.0 \\\\\n172 & Mercedes-Benz & CL500 2dr & Sedan & Europe & Rear & 94820.0 & 88324.0 & 5.0 & 8.0 & 302.0 & 16.0 & 24.0 & 4085.0 & 114.0 & 196.0 \\\\\n173 & Mercedes-Benz & E320 4dr & Sedan & Europe & Rear & 48170.0 & 44849.0 & 3.2 & 6.0 & 221.0 & 19.0 & 27.0 & 3635.0 & 112.0 & 190.0 \\\\\n174 & Mercedes-Benz & SL500 convertible 2dr & Sports & Europe & Rear & 90520.0 & 84325.0 & 
5.0 & 8.0 & 302.0 & 16.0 & 23.0 & 4065.0 & 101.0 & 179.0 \\\\\n175 & Mercedes-Benz & SLK32 AMG 2dr & Sports & Europe & Rear & 56170.0 & 52289.0 & 3.2 & 6.0 & 349.0 & 17.0 & 22.0 & 3220.0 & 95.0 & 158.0 \\\\\n176 & Mercury & Mountaineer & SUV & USA & Front & 29995.0 & 27317.0 & 4.0 & 6.0 & 210.0 & 16.0 & 21.0 & 4374.0 & 114.0 & 190.0 \\\\\n177 & Mercury & Sable LS Premium 4dr & Sedan & USA & Front & 23895.0 & 21918.0 & 3.0 & 6.0 & 201.0 & 19.0 & 26.0 & 3315.0 & 109.0 & 200.0 \\\\\n178 & Mercury & Sable GS & Wagon & USA & Front & 22595.0 & 20748.0 & 3.0 & 6.0 & 155.0 & 19.0 & 26.0 & 3488.0 & 109.0 & 198.0 \\\\\n179 & Mitsubishi & Lancer ES 4dr & Sedan & Asia & Front & 14622.0 & 13751.0 & 2.0 & 4.0 & 120.0 & 25.0 & 31.0 & 2656.0 & 102.0 & 181.0 \\\\\n180 & Mitsubishi & Diamante LS 4dr & Sedan & Asia & Front & 29282.0 & 27250.0 & 3.5 & 6.0 & 205.0 & 18.0 & 25.0 & 3549.0 & 107.0 & 194.0 \\\\\n181 & Mitsubishi & Lancer Evolution 4dr & Sports & Asia & Front & 29562.0 & 27466.0 & 2.0 & 4.0 & 271.0 & 18.0 & 26.0 & 3263.0 & 103.0 & 179.0 \\\\\n182 & Nissan & Xterra XE V6 & SUV & Asia & Front & 20939.0 & 19512.0 & 3.3 & 6.0 & 180.0 & 17.0 & 20.0 & 3760.0 & 104.0 & 178.0 \\\\\n183 & Nissan & Sentra SE-R 4dr & Sedan & Asia & Front & 17640.0 & 16444.0 & 2.5 & 4.0 & 165.0 & 23.0 & 28.0 & 2761.0 & 100.0 & 178.0 \\\\\n184 & Nissan & Quest S & Sedan & Asia & Front & 24780.0 & 22958.0 & 3.5 & 6.0 & 240.0 & 19.0 & 26.0 & 4012.0 & 124.0 & 204.0 \\\\\n185 & Nissan & Frontier King Cab XE V6 & Truck & Asia & All & 19479.0 & 18253.0 & 3.3 & 6.0 & 180.0 & 17.0 & 20.0 & 3932.0 & 116.0 & 191.0 \\\\\n186 & Oldsmobile & Alero GLS 2dr & Sedan & USA & Front & 23675.0 & 21485.0 & 3.4 & 6.0 & 170.0 & 20.0 & 29.0 & 3085.0 & 107.0 & 187.0 \\\\\n187 & Pontiac & Grand Am GT 2dr & Sedan & USA & Front & 22450.0 & 20595.0 & 3.4 & 6.0 & 175.0 & 20.0 & 29.0 & 3118.0 & 107.0 & 186.0 \\\\\n188 & Pontiac & Bonneville GXP 4dr & Sedan & USA & Front & 35995.0 & 32997.0 & 4.6 & 8.0 & 275.0 & 17.0 & 20.0 & 3790.0 & 
112.0 & 203.0 \\\\\n189 & Pontiac & Vibe & Wagon & USA & Rear & 17045.0 & 15973.0 & 1.8 & 4.0 & 130.0 & 29.0 & 36.0 & 2701.0 & 102.0 & 172.0 \\\\\n190 & Porsche & 911 Targa coupe 2dr & Sports & Europe & Rear & 76765.0 & 67128.0 & 3.6 & 6.0 & 315.0 & 18.0 & 26.0 & 3119.0 & 93.0 & 175.0 \\\\\n191 & Saab & 9-3 Arc Sport 4dr & Sedan & Europe & Front & 30860.0 & 29269.0 & 2.0 & 4.0 & 210.0 & 20.0 & 28.0 & 3175.0 & 105.0 & 183.0 \\\\\n192 & Saab & 9-3 Arc convertible 2dr & Sedan & Europe & Front & 40670.0 & 38520.0 & 2.0 & 4.0 & 210.0 & 21.0 & 29.0 & 3480.0 & 105.0 & 182.0 \\\\\n193 & Saturn & Ion1 4dr & Sedan & USA & Front & 10995.0 & 10319.0 & 2.2 & 4.0 & 140.0 & 26.0 & 35.0 & 2692.0 & 103.0 & 185.0 \\\\\n194 & Saturn & lon3 quad coupe 2dr & Sedan & USA & Front & 16350.0 & 15299.0 & 2.2 & 4.0 & 140.0 & 26.0 & 35.0 & 2751.0 & 103.0 & 185.0 \\\\\n195 & Scion & xB & Wagon & Asia & Front & 14165.0 & 13480.0 & 1.5 & 4.0 & 108.0 & 31.0 & 35.0 & 2425.0 & 98.0 & 155.0 \\\\\n196 & Subaru & Outback Limited Sedan 4dr & Sedan & Asia & All & 27145.0 & 24687.0 & 2.5 & 4.0 & 165.0 & 20.0 & 27.0 & 3495.0 & 104.0 & 184.0 \\\\\n197 & Subaru & Impreza WRX STi 4dr & Sports & Asia & All & 31545.0 & 29130.0 & 2.5 & 4.0 & 300.0 & 18.0 & 24.0 & 3263.0 & 100.0 & 174.0 \\\\\n198 & Suzuki & XL-7 EX & SUV & Asia & Front & 23699.0 & 22307.0 & 2.7 & 6.0 & 185.0 & 18.0 & 22.0 & 3682.0 & 110.0 & 187.0 \\\\\n199 & Suzuki & Forenza S 4dr & Sedan & Asia & Front & 12269.0 & 12116.0 & 2.0 & 4.0 & 119.0 & 24.0 & 31.0 & 2701.0 & 102.0 & 177.0 \\\\\n200 & Toyota & Prius 4dr (gas/electric) & Hybrid & Asia & Front & 20510.0 & 18926.0 & 1.5 & 4.0 & 110.0 & 59.0 & 51.0 & 2890.0 & 106.0 & 175.0 \\\\\n201 & Toyota & Land Cruiser & SUV & Asia & All & 54765.0 & 47986.0 & 4.7 & 8.0 & 325.0 & 13.0 & 17.0 & 5390.0 & 112.0 & 193.0 \\\\\n202 & Toyota & Corolla LE 4dr & Sedan & Asia & Front & 15295.0 & 13889.0 & 1.8 & 4.0 & 130.0 & 32.0 & 40.0 & 2524.0 & 102.0 & 178.0 \\\\\n203 & Toyota & Camry LE 4dr & Sedan & Asia & 
Front & 19560.0 & 17558.0 & 2.4 & 4.0 & 157.0 & 24.0 & 33.0 & 3086.0 & 107.0 & 189.0 \\\\\n204 & Toyota & Avalon XL 4dr & Sedan & Asia & Front & 26560.0 & 23693.0 & 3.0 & 6.0 & 210.0 & 21.0 & 29.0 & 3417.0 & 107.0 & 192.0 \\\\\n205 & Toyota & Sienna CE & Sedan & Asia & Front & 23495.0 & 21198.0 & 3.3 & 6.0 & 230.0 & 19.0 & 27.0 & 4120.0 & 119.0 & 200.0 \\\\\n206 & Toyota & Tacoma & Truck & Asia & Rear & 12800.0 & 11879.0 & 2.4 & 4.0 & 142.0 & 22.0 & 27.0 & 2750.0 & 103.0 & 191.0 \\\\\n207 & Volkswagen & Touareg V6 & SUV & Europe & All & 35515.0 & 32243.0 & 3.2 & 6.0 & 220.0 & 15.0 & 20.0 & 5086.0 & 112.0 & 187.0 \\\\\n208 & Volkswagen & New Beetle GLS 1.8T 2dr & Sedan & Europe & Front & 21055.0 & 19638.0 & 1.8 & 4.0 & 150.0 & 24.0 & 31.0 & 2820.0 & 99.0 & 161.0 \\\\\n209 & Volkswagen & Passat GLX V6 4MOTION 4dr & Sedan & Europe & Front & 33180.0 & 30583.0 & 2.8 & 6.0 & 190.0 & 19.0 & 26.0 & 3721.0 & 106.0 & 185.0 \\\\\n210 & Volkswagen & Jetta GL & Wagon & Europe & Front & 19005.0 & 17427.0 & 2.0 & 4.0 & 115.0 & 24.0 & 30.0 & 3034.0 & 99.0 & 174.0 \\\\\n211 & Volvo & S40 4dr & Sedan & Europe & Front & 25135.0 & 23701.0 & 1.9 & 4.0 & 170.0 & 22.0 & 29.0 & 2767.0 & 101.0 & 178.0 \\\\\n212 & Volvo & S80 2.9 4dr & Sedan & Europe & Front & 37730.0 & 35542.0 & 2.9 & 6.0 & 208.0 & 20.0 & 28.0 & 3576.0 & 110.0 & 190.0 \\\\\n213 & Volvo & S80 T6 4dr & Sedan & Europe & Front & 45210.0 & 42573.0 & 2.9 & 6.0 & 268.0 & 19.0 & 26.0 & 3653.0 & 110.0 & 190.0 \\\\\n214 & Acura & TSX 4dr & Sedan & Asia & Front & 26990.0 & 24647.0 & 2.4 & 4.0 & 200.0 & 22.0 & 29.0 & 3230.0 & 105.0 & 183.0 \\\\\n215 & Acura & NSX coupe 2dr manual S & Sports & Asia & Rear & 89765.0 & 79978.0 & 3.2 & 6.0 & 290.0 & 17.0 & 24.0 & 3153.0 & 100.0 & 174.0 \\\\\n216 & Audi & A4 3.0 Quattro 4dr manual & Sedan & Europe & All & 33430.0 & 30366.0 & 3.0 & 6.0 & 220.0 & 17.0 & 26.0 & 3583.0 & 104.0 & 179.0 \\\\\n217 & Audi & A4 3.0 convertible 2dr & Sedan & Europe & Front & 42490.0 & 38325.0 & 3.0 & 6.0 & 220.0 & 
20.0 & 27.0 & 3814.0 & 105.0 & 180.0 \\\\\n218 & Audi & A8 L Quattro 4dr & Sedan & Europe & All & 69190.0 & 64740.0 & 4.2 & 8.0 & 330.0 & 17.0 & 24.0 & 4399.0 & 121.0 & 204.0 \\\\\n219 & Audi & TT 1.8 Quattro 2dr (convertible) & Sports & Europe & All & 37390.0 & 33891.0 & 1.8 & 4.0 & 225.0 & 20.0 & 28.0 & 2921.0 & 96.0 & 159.0 \\\\\n220 & BMW & X3 3.0i & SUV & Europe & All & 37000.0 & 33873.0 & 3.0 & 6.0 & 225.0 & 16.0 & 23.0 & 4023.0 & 110.0 & 180.0 \\\\\n221 & BMW & 325Ci convertible 2dr & Sedan & Europe & Rear & 37995.0 & 34800.0 & 2.5 & 6.0 & 184.0 & 19.0 & 27.0 & 3560.0 & 107.0 & 177.0 \\\\\n222 & BMW & 330xi 4dr & Sedan & Europe & All & 37245.0 & 34115.0 & 3.0 & 6.0 & 225.0 & 20.0 & 29.0 & 3483.0 & 107.0 & 176.0 \\\\\n223 & BMW & 545iA 4dr & Sedan & Europe & Rear & 54995.0 & 50270.0 & 4.4 & 8.0 & 325.0 & 18.0 & 26.0 & 3814.0 & 114.0 & 191.0 \\\\\n224 & BMW & M3 convertible 2dr & Sports & Europe & Rear & 56595.0 & 51815.0 & 3.2 & 6.0 & 333.0 & 16.0 & 23.0 & 3781.0 & 108.0 & 177.0 \\\\\n225 & Buick & Rainier & SUV & USA & All & 37895.0 & 34357.0 & 4.2 & 6.0 & 275.0 & 15.0 & 21.0 & 4600.0 & 113.0 & 193.0 \\\\\n226 & Buick & Regal LS 4dr & Sedan & USA & Front & 24895.0 & 22835.0 & 3.8 & 6.0 & 200.0 & 20.0 & 30.0 & 3461.0 & 109.0 & 196.0 \\\\\n227 & Buick & Park Avenue Ultra 4dr & Sedan & USA & Front & 40720.0 & 36927.0 & 3.8 & 6.0 & 240.0 & 18.0 & 28.0 & 3909.0 & 114.0 & 207.0 \\\\\n228 & Cadillac & Deville 4dr & Sedan & USA & Front & 45445.0 & 41650.0 & 4.6 & 8.0 & 275.0 & 18.0 & 26.0 & 3984.0 & 115.0 & 207.0 \\\\\n229 & Cadillac & Escalade EXT & Truck & USA & All & 52975.0 & 48541.0 & 6.0 & 8.0 & 345.0 & 13.0 & 17.0 & 5879.0 & 130.0 & 221.0 \\\\\n230 & Chevrolet & Tracker & SUV & USA & Front & 20255.0 & 19108.0 & 2.5 & 6.0 & 165.0 & 19.0 & 22.0 & 2866.0 & 98.0 & 163.0 \\\\\n231 & Chevrolet & Cavalier 4dr & Sedan & USA & Front & 14810.0 & 13884.0 & 2.2 & 4.0 & 140.0 & 26.0 & 37.0 & 2676.0 & 104.0 & 183.0 \\\\\n232 & Chevrolet & Malibu LS 4dr & Sedan & USA & 
Front & 20370.0 & 18639.0 & 3.5 & 6.0 & 200.0 & 22.0 & 30.0 & 3297.0 & 106.0 & 188.0 \\\\\n233 & Chevrolet & Malibu LT 4dr & Sedan & USA & Front & 23495.0 & 21551.0 & 3.5 & 6.0 & 200.0 & 23.0 & 32.0 & 3315.0 & 106.0 & 188.0 \\\\\n234 & Chevrolet & Corvette 2dr & Sports & USA & Rear & 44535.0 & 39068.0 & 5.7 & 8.0 & 350.0 & 18.0 & 25.0 & 3246.0 & 105.0 & 180.0 \\\\\n235 & Chevrolet & Silverado 1500 Regular Cab & Truck & USA & Rear & 20310.0 & 18480.0 & 4.3 & 6.0 & 200.0 & 15.0 & 21.0 & 4142.0 & 119.0 & 206.0 \\\\\n236 & Chrysler & PT Cruiser 4dr & Sedan & USA & Front & 17985.0 & 16919.0 & 2.4 & 4.0 & 150.0 & 22.0 & 29.0 & 3101.0 & 103.0 & 169.0 \\\\\n237 & Chrysler & 300M 4dr & Sedan & USA & Front & 29865.0 & 27797.0 & 3.5 & 6.0 & 250.0 & 18.0 & 27.0 & 3581.0 & 113.0 & 198.0 \\\\\n238 & Chrysler & Sebring convertible 2dr & Sedan & USA & Front & 25215.0 & 23451.0 & 2.4 & 4.0 & 150.0 & 22.0 & 30.0 & 3357.0 & 106.0 & 194.0 \\\\\n239 & Chrysler & Town and Country Limited & Sedan & USA & Front & 38380.0 & 35063.0 & 3.8 & 6.0 & 215.0 & 18.0 & 25.0 & 4331.0 & 119.0 & 201.0 \\\\\n240 & Dodge & Neon SE 4dr & Sedan & USA & Front & 13670.0 & 12849.0 & 2.0 & 4.0 & 132.0 & 29.0 & 36.0 & 2581.0 & 105.0 & 174.0 \\\\\n241 & Dodge & Stratus SE 4dr & Sedan & USA & Front & 20220.0 & 18821.0 & 2.4 & 4.0 & 150.0 & 21.0 & 28.0 & 3175.0 & 108.0 & 191.0 \\\\\n242 & Dodge & Viper SRT-10 convertible 2dr & Sports & USA & Rear & 81795.0 & 74451.0 & 8.3 & 10.0 & 500.0 & 12.0 & 20.0 & 3410.0 & 99.0 & 176.0 \\\\\n243 & Ford & Excursion 6.8 XLT & SUV & USA & All & 41475.0 & 36494.0 & 6.8 & 10.0 & 310.0 & 10.0 & 13.0 & 7190.0 & 137.0 & 227.0 \\\\\n244 & Ford & Focus ZX3 2dr hatch & Sedan & USA & Front & 13270.0 & 12482.0 & 2.0 & 4.0 & 130.0 & 26.0 & 33.0 & 2612.0 & 103.0 & 168.0 \\\\\n245 & Ford & Focus SVT 2dr & Sedan & USA & Front & 19135.0 & 17878.0 & 2.0 & 4.0 & 170.0 & 21.0 & 28.0 & 2750.0 & 103.0 & 168.0 \\\\\n246 & Ford & Crown Victoria LX 4dr & Sedan & USA & Rear & 27370.0 & 25105.0 & 4.6 & 
8.0 & 224.0 & 17.0 & 25.0 & 4057.0 & 115.0 & 212.0 \\\\\n247 & Ford & Mustang GT Premium convertible 2dr & Sports & USA & Rear & 29380.0 & 26875.0 & 4.6 & 8.0 & 260.0 & 17.0 & 25.0 & 3347.0 & 101.0 & 183.0 \\\\\n248 & Ford & Ranger 2.3 XL Regular Cab & Truck & USA & Rear & 14385.0 & 13717.0 & 2.3 & 4.0 & 143.0 & 24.0 & 29.0 & 3028.0 & 111.0 & 188.0 \\\\\n249 & GMC & Yukon 1500 SLE & SUV & USA & Front & 35725.0 & 31361.0 & 4.8 & 8.0 & 285.0 & 16.0 & 19.0 & 5042.0 & 116.0 & 199.0 \\\\\n250 & GMC & Sierra Extended Cab 1500 & Truck & USA & Rear & 25717.0 & 22604.0 & 4.8 & 8.0 & 285.0 & 17.0 & 20.0 & 4548.0 & 144.0 & 230.0 \\\\\n251 & Honda & Insight 2dr (gas/electric) & Hybrid & Asia & Front & 19110.0 & 17911.0 & 2.0 & 3.0 & 73.0 & 60.0 & 66.0 & 1850.0 & 95.0 & 155.0 \\\\\n252 & Honda & Civic DX 2dr & Sedan & Asia & Front & 13270.0 & 12175.0 & 1.7 & 4.0 & 115.0 & 32.0 & 38.0 & 2432.0 & 103.0 & 175.0 \\\\\n253 & Honda & Accord EX 2dr & Sedan & Asia & Front & 22260.0 & 20080.0 & 2.4 & 4.0 & 160.0 & 26.0 & 34.0 & 3047.0 & 105.0 & 188.0 \\\\\n254 & Honda & Accord EX V6 2dr & Sedan & Asia & Front & 26960.0 & 24304.0 & 3.0 & 6.0 & 240.0 & 21.0 & 30.0 & 3294.0 & 105.0 & 188.0 \\\\\n255 & Hummer & H2 & SUV & USA & All & 49995.0 & 45815.0 & 6.0 & 8.0 & 316.0 & 10.0 & 12.0 & 6400.0 & 123.0 & 190.0 \\\\\n256 & Hyundai & Accent GT 2dr hatch & Sedan & Asia & Front & 11939.0 & 11209.0 & 1.6 & 4.0 & 103.0 & 29.0 & 33.0 & 2339.0 & 96.0 & 167.0 \\\\\n257 & Hyundai & Sonata GLS 4dr & Sedan & Asia & Front & 19339.0 & 17574.0 & 2.7 & 6.0 & 170.0 & 19.0 & 27.0 & 3217.0 & 106.0 & 187.0 \\\\\n258 & Hyundai & Tiburon GT V6 2dr & Sports & Asia & Front & 18739.0 & 17101.0 & 2.7 & 6.0 & 172.0 & 19.0 & 26.0 & 3023.0 & 100.0 & 173.0 \\\\\n259 & Infiniti & I35 4dr & Sedan & Asia & Front & 31145.0 & 28320.0 & 3.5 & 6.0 & 255.0 & 19.0 & 26.0 & 3306.0 & 108.0 & 194.0 \\\\\n260 & Infiniti & FX45 & Wagon & Asia & All & 36395.0 & 33121.0 & 4.5 & 8.0 & 315.0 & 15.0 & 19.0 & 4309.0 & 112.0 & 189.0 
\\\\\n261 & Jaguar & X-Type 3.0 4dr & Sedan & Europe & All & 33995.0 & 30995.0 & 3.0 & 6.0 & 227.0 & 18.0 & 25.0 & 3516.0 & 107.0 & 184.0 \\\\\n262 & Jaguar & Vanden Plas 4dr & Sedan & Europe & Rear & 68995.0 & 62846.0 & 4.2 & 8.0 & 294.0 & 18.0 & 28.0 & 3803.0 & 119.0 & 200.0 \\\\\n263 & Jaguar & XK8 convertible 2dr & Sports & Europe & Rear & 74995.0 & 68306.0 & 4.2 & 8.0 & 294.0 & 18.0 & 26.0 & 3980.0 & 102.0 & 187.0 \\\\\n264 & Jeep & Liberty Sport & SUV & USA & All & 20130.0 & 18973.0 & 2.4 & 4.0 & 150.0 & 20.0 & 24.0 & 3826.0 & 104.0 & 174.0 \\\\\n265 & Kia & Rio 4dr manual & Sedan & Asia & Front & 10280.0 & 9875.0 & 1.6 & 4.0 & 104.0 & 26.0 & 33.0 & 2403.0 & 95.0 & 167.0 \\\\\n266 & Kia & Spectra GSX 4dr hatch & Sedan & Asia & Front & 14630.0 & 13790.0 & 1.8 & 4.0 & 124.0 & 24.0 & 32.0 & 2697.0 & 101.0 & 178.0 \\\\\n267 & Kia & Rio Cinco & Wagon & Asia & Front & 11905.0 & 11410.0 & 1.6 & 4.0 & 104.0 & 26.0 & 33.0 & 2447.0 & 95.0 & 167.0 \\\\\n268 & Lexus & GX 470 & SUV & Asia & All & 45700.0 & 39838.0 & 4.7 & 8.0 & 235.0 & 15.0 & 19.0 & 4740.0 & 110.0 & 188.0 \\\\\n269 & Lexus & IS 300 4dr manual & Sedan & Asia & Rear & 31045.0 & 27404.0 & 3.0 & 6.0 & 215.0 & 18.0 & 25.0 & 3255.0 & 105.0 & 177.0 \\\\\n270 & Lexus & LS 430 4dr & Sedan & Asia & Rear & 55750.0 & 48583.0 & 4.3 & 8.0 & 290.0 & 18.0 & 25.0 & 3990.0 & 115.0 & 197.0 \\\\\n271 & Lincoln & Aviator Ultimate & SUV & USA & Front & 42915.0 & 39443.0 & 4.6 & 8.0 & 302.0 & 13.0 & 18.0 & 4834.0 & 114.0 & 193.0 \\\\\n272 & Lincoln & LS V8 Ultimate 4dr & Sedan & USA & Rear & 43495.0 & 39869.0 & 3.9 & 8.0 & 280.0 & 17.0 & 24.0 & 3768.0 & 115.0 & 194.0 \\\\\n273 & MINI & Cooper & Sedan & Europe & Front & 16999.0 & 15437.0 & 1.6 & 4.0 & 115.0 & 28.0 & 37.0 & 2524.0 & 97.0 & 143.0 \\\\\n274 & Mazda & Mazda3 s 4dr & Sedan & Asia & Front & 17200.0 & 15922.0 & 2.3 & 4.0 & 160.0 & 25.0 & 31.0 & 2762.0 & 104.0 & 179.0 \\\\\n275 & Mazda & MX-5 Miata LS convertible 2dr & Sports & Asia & Rear & 25193.0 & 23285.0 & 1.8 & 
4.0 & 142.0 & 23.0 & 28.0 & 2387.0 & 89.0 & 156.0 \\\\\n276 & Mazda & B4000 SE Cab Plus & Truck & Asia & All & 22350.0 & 20482.0 & 4.0 & 6.0 & 207.0 & 15.0 & 19.0 & 3571.0 & 126.0 & 203.0 \\\\\n277 & Mercedes-Benz & C320 Sport 2dr & Sedan & Europe & Rear & 28370.0 & 26435.0 & 3.2 & 6.0 & 215.0 & 19.0 & 26.0 & 3430.0 & 107.0 & 178.0 \\\\\n278 & Mercedes-Benz & C320 4dr & Sedan & Europe & Rear & 37630.0 & 35046.0 & 3.2 & 6.0 & 215.0 & 20.0 & 26.0 & 3450.0 & 107.0 & 178.0 \\\\\n279 & Mercedes-Benz & CL600 2dr & Sedan & Europe & Rear & 128420.0 & 119600.0 & 5.5 & 12.0 & 493.0 & 13.0 & 19.0 & 4473.0 & 114.0 & 196.0 \\\\\n280 & Mercedes-Benz & E500 4dr & Sedan & Europe & Rear & 57270.0 & 53382.0 & 5.0 & 8.0 & 302.0 & 16.0 & 20.0 & 3815.0 & 112.0 & 190.0 \\\\\n281 & Mercedes-Benz & SL55 AMG 2dr & Sports & Europe & Rear & 121770.0 & 113388.0 & 5.5 & 8.0 & 493.0 & 14.0 & 21.0 & 4235.0 & 101.0 & 179.0 \\\\\n282 & Mercedes-Benz & C240 & Wagon & Europe & Rear & 33780.0 & 31466.0 & 2.6 & 6.0 & 168.0 & 19.0 & 25.0 & 3470.0 & 107.0 & 179.0 \\\\\n283 & Mercury & Sable GS 4dr & Sedan & USA & Front & 21595.0 & 19848.0 & 3.0 & 6.0 & 155.0 & 20.0 & 27.0 & 3308.0 & 109.0 & 200.0 \\\\\n284 & Mercury & Grand Marquis LS Ultimate 4dr & Sedan & USA & Rear & 30895.0 & 28318.0 & 4.6 & 8.0 & 224.0 & 17.0 & 25.0 & 4052.0 & 115.0 & 212.0 \\\\\n285 & Mitsubishi & Endeavor XLS & SUV & Asia & All & 30492.0 & 28330.0 & 3.8 & 6.0 & 215.0 & 17.0 & 21.0 & 4134.0 & 109.0 & 190.0 \\\\\n286 & Mitsubishi & Lancer LS 4dr & Sedan & Asia & Front & 16722.0 & 15718.0 & 2.0 & 4.0 & 120.0 & 25.0 & 31.0 & 2795.0 & 102.0 & 181.0 \\\\\n287 & Mitsubishi & Galant GTS 4dr & Sedan & Asia & Front & 25700.0 & 23883.0 & 3.8 & 6.0 & 230.0 & 18.0 & 26.0 & 3649.0 & 108.0 & 191.0 \\\\\n288 & Mitsubishi & Lancer Sportback LS & Wagon & Asia & Front & 17495.0 & 16295.0 & 2.4 & 4.0 & 160.0 & 25.0 & 31.0 & 3020.0 & 102.0 & 181.0 \\\\\n289 & Nissan & Sentra 1.8 4dr & Sedan & Asia & Front & 12740.0 & 12205.0 & 1.8 & 4.0 & 126.0 & 
28.0 & 35.0 & 2513.0 & 100.0 & 178.0 \\\\\n290 & Nissan & Altima SE 4dr & Sedan & Asia & Front & 23290.0 & 21580.0 & 3.5 & 6.0 & 245.0 & 21.0 & 26.0 & 3197.0 & 110.0 & 192.0 \\\\\n291 & Nissan & Quest SE & Sedan & Asia & Front & 32780.0 & 30019.0 & 3.5 & 6.0 & 240.0 & 18.0 & 25.0 & 4175.0 & 124.0 & 204.0 \\\\\n292 & Nissan & Titan King Cab XE & Truck & Asia & All & 26650.0 & 24926.0 & 5.6 & 8.0 & 305.0 & 14.0 & 18.0 & 5287.0 & 140.0 & 224.0 \\\\\n293 & Oldsmobile & Silhouette GL & Sedan & USA & Front & 28790.0 & 26120.0 & 3.4 & 6.0 & 185.0 & 19.0 & 26.0 & 3948.0 & 120.0 & 201.0 \\\\\n294 & Pontiac & Grand Prix GT1 4dr & Sedan & USA & Front & 22395.0 & 20545.0 & 3.8 & 6.0 & 200.0 & 20.0 & 30.0 & 3477.0 & 111.0 & 198.0 \\\\\n295 & Pontiac & Montana & Sedan & USA & Front & 23845.0 & 21644.0 & 3.4 & 6.0 & 185.0 & 19.0 & 26.0 & 3803.0 & 112.0 & 187.0 \\\\\n296 & Porsche & Cayenne S & SUV & Europe & All & 56665.0 & 49865.0 & 4.5 & 8.0 & 340.0 & 14.0 & 18.0 & 4950.0 & 112.0 & 188.0 \\\\\n297 & Porsche & 911 GT2 2dr & Sports & Europe & Rear & 192465.0 & 173560.0 & 3.6 & 6.0 & 477.0 & 17.0 & 24.0 & 3131.0 & 93.0 & 175.0 \\\\\n298 & Saab & 9-3 Aero 4dr & Sedan & Europe & Front & 33360.0 & 31562.0 & 2.0 & 4.0 & 210.0 & 20.0 & 28.0 & 3175.0 & 105.0 & 183.0 \\\\\n299 & Saab & 9-3 Aero convertible 2dr & Sedan & Europe & Front & 43175.0 & 40883.0 & 2.0 & 4.0 & 210.0 & 21.0 & 30.0 & 3700.0 & 105.0 & 182.0 \\\\\n300 & Saturn & lon2 4dr & Sedan & USA & Front & 14300.0 & 13393.0 & 2.2 & 4.0 & 140.0 & 26.0 & 35.0 & 2692.0 & 103.0 & 185.0 \\\\\n301 & Saturn & L300-2 4dr & Sedan & USA & Front & 21410.0 & 19801.0 & 3.0 & 6.0 & 182.0 & 20.0 & 28.0 & 3197.0 & 107.0 & 190.0 \\\\\n302 & Subaru & Impreza 2.5 RS 4dr & Sedan & Asia & All & 19945.0 & 18399.0 & 2.5 & 4.0 & 165.0 & 22.0 & 28.0 & 2965.0 & 99.0 & 174.0 \\\\\n303 & Subaru & Outback H6 4dr & Sedan & Asia & All & 29345.0 & 26660.0 & 3.0 & 6.0 & 212.0 & 19.0 & 26.0 & 3610.0 & 104.0 & 184.0 \\\\\n304 & Subaru & Baja & Truck & Asia & All 
& 24520.0 & 22304.0 & 2.5 & 4.0 & 165.0 & 21.0 & 28.0 & 3485.0 & 104.0 & 193.0 \\\\\n305 & Suzuki & Vitara LX & SUV & Asia & All & 17163.0 & 16949.0 & 2.5 & 6.0 & 165.0 & 19.0 & 22.0 & 3020.0 & 98.0 & 163.0 \\\\\n306 & Suzuki & Forenza EX 4dr & Sedan & Asia & Front & 15568.0 & 15378.0 & 2.0 & 4.0 & 119.0 & 22.0 & 30.0 & 2756.0 & 102.0 & 177.0 \\\\\n307 & Toyota & Sequoia SR5 & SUV & Asia & All & 35695.0 & 31827.0 & 4.7 & 8.0 & 240.0 & 14.0 & 17.0 & 5270.0 & 118.0 & 204.0 \\\\\n308 & Toyota & RAV4 & SUV & Asia & All & 20290.0 & 18553.0 & 2.4 & 4.0 & 161.0 & 22.0 & 27.0 & 3119.0 & 98.0 & 167.0 \\\\\n309 & Toyota & Echo 2dr manual & Sedan & Asia & Front & 10760.0 & 10144.0 & 1.5 & 4.0 & 108.0 & 35.0 & 43.0 & 2035.0 & 93.0 & 163.0 \\\\\n310 & Toyota & Camry LE V6 4dr & Sedan & Asia & Front & 22775.0 & 20325.0 & 3.0 & 6.0 & 210.0 & 21.0 & 29.0 & 3296.0 & 107.0 & 189.0 \\\\\n311 & Toyota & Camry XLE V6 4dr & Sedan & Asia & Front & 25920.0 & 23125.0 & 3.0 & 6.0 & 210.0 & 21.0 & 29.0 & 3362.0 & 107.0 & 189.0 \\\\\n312 & Toyota & Sienna XLE Limited & Sedan & Asia & Front & 28800.0 & 25690.0 & 3.3 & 6.0 & 230.0 & 19.0 & 27.0 & 4165.0 & 119.0 & 200.0 \\\\\n313 & Toyota & Tundra Regular Cab V6 & Truck & Asia & Rear & 16495.0 & 14978.0 & 3.4 & 6.0 & 190.0 & 16.0 & 18.0 & 3925.0 & 128.0 & 218.0 \\\\\n314 & Volkswagen & Golf GLS 4dr & Sedan & Europe & Front & 18715.0 & 17478.0 & 2.0 & 4.0 & 115.0 & 24.0 & 31.0 & 2897.0 & 99.0 & 165.0 \\\\\n315 & Volkswagen & Jetta GLI VR6 4dr & Sedan & Europe & Front & 23785.0 & 21686.0 & 2.8 & 6.0 & 200.0 & 21.0 & 30.0 & 3179.0 & 99.0 & 172.0 \\\\\n316 & Volkswagen & Passat W8 4MOTION 4dr & Sedan & Europe & Front & 39235.0 & 36052.0 & 4.0 & 8.0 & 270.0 & 18.0 & 25.0 & 3953.0 & 106.0 & 185.0 \\\\\n317 & Volkswagen & Passat GLS 1.8T & Wagon & Europe & Front & 24955.0 & 22801.0 & 1.8 & 4.0 & 170.0 & 22.0 & 31.0 & 3338.0 & 106.0 & 184.0 \\\\\n318 & Volvo & S60 2.5 4dr & Sedan & Europe & All & 31745.0 & 29916.0 & 2.5 & 5.0 & 208.0 & 20.0 & 27.0 & 
3903.0 & 107.0 & 180.0 \\\\\n319 & Volvo & S80 2.5T 4dr & Sedan & Europe & All & 37885.0 & 35688.0 & 2.5 & 5.0 & 194.0 & 20.0 & 27.0 & 3691.0 & 110.0 & 190.0 \\\\\n320 & Volvo & V40 & Wagon & Europe & Front & 26135.0 & 24641.0 & 1.9 & 4.0 & 170.0 & 22.0 & 29.0 & 2822.0 & 101.0 & 180.0 \\\\\n321 & Acura & TL 4dr & Sedan & Asia & Front & 33195.0 & 30299.0 & 3.2 & 6.0 & 270.0 & 20.0 & 28.0 & 3575.0 & 108.0 & 186.0 \\\\\n322 & Audi & A4 1.8T 4dr & Sedan & Europe & Front & 25940.0 & 23508.0 & 1.8 & 4.0 & 170.0 & 22.0 & 31.0 & 3252.0 & 104.0 & 179.0 \\\\\n323 & Audi & A4 3.0 Quattro 4dr auto & Sedan & Europe & All & 34480.0 & 31388.0 & 3.0 & 6.0 & 220.0 & 18.0 & 25.0 & 3627.0 & 104.0 & 179.0 \\\\\n324 & Audi & A4 3.0 Quattro convertible 2dr & Sedan & Europe & All & 44240.0 & 40075.0 & 3.0 & 6.0 & 220.0 & 18.0 & 25.0 & 4013.0 & 105.0 & 180.0 \\\\\n325 & Audi & S4 Quattro 4dr & Sedan & Europe & All & 48040.0 & 43556.0 & 4.2 & 8.0 & 340.0 & 14.0 & 20.0 & 3825.0 & 104.0 & 179.0 \\\\\n326 & Audi & TT 3.2 coupe 2dr (convertible) & Sports & Europe & All & 40590.0 & 36739.0 & 3.2 & 6.0 & 250.0 & 21.0 & 29.0 & 3351.0 & 96.0 & 159.0 \\\\\n327 & BMW & X5 4.4i & SUV & Europe & All & 52195.0 & 47720.0 & 4.4 & 8.0 & 325.0 & 16.0 & 22.0 & 4824.0 & 111.0 & 184.0 \\\\\n328 & BMW & 325xi 4dr & Sedan & Europe & All & 30245.0 & 27745.0 & 2.5 & 6.0 & 184.0 & 19.0 & 27.0 & 3461.0 & 107.0 & 176.0 \\\\\n329 & BMW & 525i 4dr & Sedan & Europe & Rear & 39995.0 & 36620.0 & 2.5 & 6.0 & 184.0 & 19.0 & 28.0 & 3428.0 & 114.0 & 191.0 \\\\\n330 & BMW & 745i 4dr & Sedan & Europe & Rear & 69195.0 & 63190.0 & 4.4 & 8.0 & 325.0 & 18.0 & 26.0 & 4376.0 & 118.0 & 198.0 \\\\\n331 & BMW & Z4 convertible 2.5i 2dr & Sports & Europe & Rear & 33895.0 & 31065.0 & 2.5 & 6.0 & 184.0 & 20.0 & 28.0 & 2932.0 & 98.0 & 161.0 \\\\\n332 & Buick & Rendezvous CX & SUV & USA & Front & 26545.0 & 24085.0 & 3.4 & 6.0 & 185.0 & 19.0 & 26.0 & 4024.0 & 112.0 & 187.0 \\\\\n333 & Buick & Regal GS 4dr & Sedan & USA & Front & 28345.0 & 
26047.0 & 3.8 & 6.0 & 240.0 & 18.0 & 28.0 & 3536.0 & 109.0 & 196.0 \\\\\n334 & Cadillac & Escalade & SUV & USA & Front & 52795.0 & 48377.0 & 5.3 & 8.0 & 295.0 & 14.0 & 18.0 & 5367.0 & 116.0 & 199.0 \\\\\n335 & Cadillac & Deville DTS 4dr & Sedan & USA & Front & 50595.0 & 46362.0 & 4.6 & 8.0 & 300.0 & 18.0 & 26.0 & 4044.0 & 115.0 & 207.0 \\\\\n336 & Chevrolet & Suburban 1500 LT & SUV & USA & Front & 42735.0 & 37422.0 & 5.3 & 8.0 & 295.0 & 14.0 & 18.0 & 4947.0 & 130.0 & 219.0 \\\\\n337 & Chevrolet & Aveo 4dr & Sedan & USA & Front & 11690.0 & 10965.0 & 1.6 & 4.0 & 103.0 & 28.0 & 34.0 & 2370.0 & 98.0 & 167.0 \\\\\n338 & Chevrolet & Cavalier LS 2dr & Sedan & USA & Front & 16385.0 & 15357.0 & 2.2 & 4.0 & 140.0 & 26.0 & 37.0 & 2617.0 & 104.0 & 183.0 \\\\\n339 & Chevrolet & Monte Carlo LS 2dr & Sedan & USA & Front & 21825.0 & 20026.0 & 3.4 & 6.0 & 180.0 & 21.0 & 32.0 & 3340.0 & 111.0 & 198.0 \\\\\n340 & Chevrolet & Monte Carlo SS 2dr & Sedan & USA & Front & 24225.0 & 22222.0 & 3.8 & 6.0 & 200.0 & 18.0 & 28.0 & 3434.0 & 111.0 & 198.0 \\\\\n341 & Chevrolet & Corvette convertible 2dr & Sports & USA & Rear & 51535.0 & 45193.0 & 5.7 & 8.0 & 350.0 & 18.0 & 25.0 & 3248.0 & 105.0 & 180.0 \\\\\n342 & Chevrolet & Silverado SS & Truck & USA & All & 40340.0 & 35399.0 & 6.0 & 8.0 & 300.0 & 13.0 & 17.0 & 4804.0 & 144.0 & 238.0 \\\\\n343 & Chrysler & PT Cruiser Limited 4dr & Sedan & USA & Front & 22000.0 & 20573.0 & 2.4 & 4.0 & 150.0 & 22.0 & 29.0 & 3105.0 & 103.0 & 169.0 \\\\\n344 & Chrysler & Concorde LX 4dr & Sedan & USA & Front & 24130.0 & 22452.0 & 2.7 & 6.0 & 200.0 & 21.0 & 29.0 & 3479.0 & 113.0 & 208.0 \\\\\n345 & Chrysler & 300M Special Edition 4dr & Sedan & USA & Front & 33295.0 & 30884.0 & 3.5 & 6.0 & 255.0 & 18.0 & 27.0 & 3650.0 & 113.0 & 198.0 \\\\\n346 & Chrysler & Crossfire 2dr & Sports & USA & Rear & 34495.0 & 32033.0 & 3.2 & 6.0 & 215.0 & 17.0 & 25.0 & 3060.0 & 95.0 & 160.0 \\\\\n347 & Dodge & Neon SXT 4dr & Sedan & USA & Front & 15040.0 & 14086.0 & 2.0 & 4.0 & 132.0 & 
29.0 & 36.0 & 2626.0 & 105.0 & 174.0 \\\\\n348 & Dodge & Intrepid ES 4dr & Sedan & USA & Front & 24885.0 & 23058.0 & 3.5 & 6.0 & 232.0 & 18.0 & 27.0 & 3487.0 & 113.0 & 204.0 \\\\\n349 & Dodge & Dakota Regular Cab & Truck & USA & Rear & 17630.0 & 16264.0 & 3.7 & 6.0 & 210.0 & 16.0 & 22.0 & 3714.0 & 112.0 & 193.0 \\\\\n350 & Ford & Expedition 4.6 XLT & SUV & USA & Front & 34560.0 & 30468.0 & 4.6 & 8.0 & 232.0 & 15.0 & 19.0 & 5000.0 & 119.0 & 206.0 \\\\\n351 & Ford & Focus LX 4dr & Sedan & USA & Front & 13730.0 & 12906.0 & 2.0 & 4.0 & 110.0 & 27.0 & 36.0 & 2606.0 & 103.0 & 168.0 \\\\\n352 & Ford & Taurus LX 4dr & Sedan & USA & Front & 20320.0 & 18881.0 & 3.0 & 6.0 & 155.0 & 20.0 & 27.0 & 3306.0 & 109.0 & 198.0 \\\\\n353 & Ford & Crown Victoria LX Sport 4dr & Sedan & USA & Rear & 30315.0 & 27756.0 & 4.6 & 8.0 & 239.0 & 17.0 & 25.0 & 4057.0 & 115.0 & 212.0 \\\\\n354 & Ford & Thunderbird Deluxe convert w/hardtop 2d & Sports & USA & Front & 37530.0 & 34483.0 & 3.9 & 8.0 & 280.0 & 17.0 & 24.0 & 3780.0 & 107.0 & 186.0 \\\\\n355 & Ford & Focus ZTW & Wagon & USA & Front & 17475.0 & 16375.0 & 2.0 & 4.0 & 130.0 & 26.0 & 33.0 & 2702.0 & 103.0 & 178.0 \\\\\n356 & GMC & Yukon XL 2500 SLT & SUV & USA & All & 46265.0 & 40534.0 & 6.0 & 8.0 & 325.0 & 13.0 & 17.0 & 6133.0 & 130.0 & 219.0 \\\\\n357 & GMC & Sierra HD 2500 & Truck & USA & All & 29322.0 & 25759.0 & 6.0 & 8.0 & 300.0 & 13.0 & 18.0 & 5440.0 & 133.0 & 222.0 \\\\\n358 & Honda & Pilot LX & SUV & Asia & All & 27560.0 & 24843.0 & 3.5 & 6.0 & 240.0 & 17.0 & 22.0 & 4387.0 & 106.0 & 188.0 \\\\\n359 & Honda & Civic HX 2dr & Sedan & Asia & Front & 14170.0 & 12996.0 & 1.7 & 4.0 & 117.0 & 36.0 & 44.0 & 2500.0 & 103.0 & 175.0 \\\\\n360 & Honda & Civic EX 4dr & Sedan & Asia & Front & 17750.0 & 16265.0 & 1.7 & 4.0 & 127.0 & 32.0 & 37.0 & 2601.0 & 103.0 & 175.0 \\\\\n361 & Honda & Odyssey LX & Sedan & Asia & Front & 24950.0 & 22498.0 & 3.5 & 6.0 & 240.0 & 18.0 & 25.0 & 4310.0 & 118.0 & 201.0 \\\\\n362 & Hyundai & Santa Fe GLS & SUV & Asia & 
Front & 21589.0 & 20201.0 & 2.7 & 6.0 & 173.0 & 20.0 & 26.0 & 3549.0 & 103.0 & 177.0 \\\\\n363 & Hyundai & Elantra GLS 4dr & Sedan & Asia & Front & 13839.0 & 12781.0 & 2.0 & 4.0 & 138.0 & 26.0 & 34.0 & 2635.0 & 103.0 & 178.0 \\\\\n364 & Hyundai & Sonata LX 4dr & Sedan & Asia & Front & 20339.0 & 18380.0 & 2.7 & 6.0 & 170.0 & 19.0 & 27.0 & 3217.0 & 106.0 & 187.0 \\\\\n365 & Infiniti & G35 4dr & Sedan & Asia & Rear & 28495.0 & 26157.0 & 3.5 & 6.0 & 260.0 & 18.0 & 26.0 & 3336.0 & 112.0 & 187.0 \\\\\n366 & Infiniti & M45 4dr & Sedan & Asia & Rear & 42845.0 & 38792.0 & 4.5 & 8.0 & 340.0 & 17.0 & 23.0 & 3851.0 & 110.0 & 197.0 \\\\\n367 & Isuzu & Ascender S & SUV & Asia & All & 31849.0 & 29977.0 & 4.2 & 6.0 & 275.0 & 15.0 & 20.0 & 4967.0 & 129.0 & 208.0 \\\\\n368 & Jaguar & S-Type 3.0 4dr & Sedan & Europe & Rear & 43895.0 & 40004.0 & 3.0 & 6.0 & 235.0 & 18.0 & 26.0 & 3777.0 & 115.0 & 192.0 \\\\\n369 & Jaguar & XJ8 4dr & Sedan & Europe & Rear & 59995.0 & 54656.0 & 4.2 & 8.0 & 294.0 & 18.0 & 28.0 & 3803.0 & 119.0 & 200.0 \\\\\n370 & Jaguar & XKR coupe 2dr & Sports & Europe & Rear & 81995.0 & 74676.0 & 4.2 & 8.0 & 390.0 & 16.0 & 23.0 & 3865.0 & 102.0 & 187.0 \\\\\n371 & Jeep & Wrangler Sahara convertible 2dr & SUV & USA & All & 25520.0 & 23275.0 & 4.0 & 6.0 & 190.0 & 16.0 & 19.0 & 3575.0 & 93.0 & 150.0 \\\\\n372 & Kia & Rio 4dr auto & Sedan & Asia & Front & 11155.0 & 10705.0 & 1.6 & 4.0 & 104.0 & 25.0 & 32.0 & 2458.0 & 95.0 & 167.0 \\\\\n373 & Kia & Optima LX V6 4dr & Sedan & Asia & Front & 18435.0 & 16850.0 & 2.7 & 6.0 & 170.0 & 20.0 & 27.0 & 3279.0 & 106.0 & 186.0 \\\\\n374 & Land Rover & Range Rover HSE & SUV & Europe & All & 72250.0 & 65807.0 & 4.4 & 8.0 & 282.0 & 12.0 & 16.0 & 5379.0 & 113.0 & 195.0 \\\\\n375 & Lexus & LX 470 & SUV & Asia & All & 64800.0 & 56455.0 & 4.7 & 8.0 & 235.0 & 13.0 & 17.0 & 5590.0 & 112.0 & 193.0 \\\\\n376 & Lexus & IS 300 4dr auto & Sedan & Asia & Rear & 32415.0 & 28611.0 & 3.0 & 6.0 & 215.0 & 18.0 & 24.0 & 3285.0 & 105.0 & 177.0 \\\\\n377 & 
Lexus & SC 430 convertible 2dr & Sports & Asia & Rear & 63200.0 & 55063.0 & 4.3 & 8.0 & 300.0 & 18.0 & 23.0 & 3840.0 & 103.0 & 178.0 \\\\\n378 & Lincoln & LS V6 Luxury 4dr & Sedan & USA & Rear & 32495.0 & 29969.0 & 3.0 & 6.0 & 232.0 & 20.0 & 26.0 & 3681.0 & 115.0 & 194.0 \\\\\n379 & Lincoln & Town Car Signature 4dr & Sedan & USA & Rear & 41815.0 & 38418.0 & 4.6 & 8.0 & 239.0 & 17.0 & 25.0 & 4369.0 & 118.0 & 215.0 \\\\\n380 & MINI & Cooper S & Sedan & Europe & Front & 19999.0 & 18137.0 & 1.6 & 4.0 & 163.0 & 25.0 & 34.0 & 2678.0 & 97.0 & 144.0 \\\\\n381 & Mazda & Mazda6 i 4dr & Sedan & Asia & Front & 19270.0 & 17817.0 & 2.3 & 4.0 & 160.0 & 24.0 & 32.0 & 3042.0 & 105.0 & 187.0 \\\\\n382 & Mazda & RX-8 4dr automatic & Sports & Asia & Rear & 25700.0 & 23794.0 & 1.3 & NaN & 197.0 & 18.0 & 25.0 & 3053.0 & 106.0 & 174.0 \\\\\n383 & Mercedes-Benz & G500 & SUV & Europe & All & 76870.0 & 71540.0 & 5.0 & 8.0 & 292.0 & 13.0 & 14.0 & 5423.0 & 112.0 & 186.0 \\\\\n384 & Mercedes-Benz & C240 4dr & Sedan & Europe & Rear & 32280.0 & 30071.0 & 2.6 & 6.0 & 168.0 & 20.0 & 25.0 & 3360.0 & 107.0 & 178.0 \\\\\n385 & Mercedes-Benz & C320 4dr & Sedan & Europe & All & 38830.0 & 36162.0 & 3.2 & 6.0 & 215.0 & 19.0 & 27.0 & 3450.0 & 107.0 & 178.0 \\\\\n386 & Mercedes-Benz & CLK320 coupe 2dr (convertible) & Sedan & Europe & Rear & 45707.0 & 41966.0 & 3.2 & 6.0 & 215.0 & 20.0 & 26.0 & 3770.0 & 107.0 & 183.0 \\\\\n387 & Mercedes-Benz & S430 4dr & Sedan & Europe & Rear & 74320.0 & 69168.0 & 4.3 & 8.0 & 275.0 & 18.0 & 26.0 & 4160.0 & 122.0 & 203.0 \\\\\n388 & Mercedes-Benz & SL600 convertible 2dr & Sports & Europe & Rear & 126670.0 & 117854.0 & 5.5 & 12.0 & 493.0 & 13.0 & 19.0 & 4429.0 & 101.0 & 179.0 \\\\\n389 & Mercedes-Benz & E320 & Wagon & Europe & Rear & 50670.0 & 47174.0 & 3.2 & 6.0 & 221.0 & 19.0 & 27.0 & 3966.0 & 112.0 & 190.0 \\\\\n390 & Mercury & Grand Marquis GS 4dr & Sedan & USA & Rear & 24695.0 & 23217.0 & 4.6 & 8.0 & 224.0 & 17.0 & 25.0 & 4052.0 & 115.0 & 212.0 \\\\\n391 & Mercury & 
Marauder 4dr & Sedan & USA & Rear & 34495.0 & 31558.0 & 4.6 & 8.0 & 302.0 & 17.0 & 23.0 & 4195.0 & 115.0 & 212.0 \\\\\n392 & Mitsubishi & Montero XLS & SUV & Asia & All & 33112.0 & 30763.0 & 3.8 & 6.0 & 215.0 & 15.0 & 19.0 & 4718.0 & 110.0 & 190.0 \\\\\n393 & Mitsubishi & Galant ES 2.4L 4dr & Sedan & Asia & Front & 19312.0 & 17957.0 & 2.4 & 4.0 & 160.0 & 23.0 & 30.0 & 3351.0 & 108.0 & 191.0 \\\\\n394 & Mitsubishi & Eclipse GTS 2dr & Sports & Asia & Front & 25092.0 & 23456.0 & 3.0 & 6.0 & 210.0 & 21.0 & 28.0 & 3241.0 & 101.0 & 177.0 \\\\\n395 & Nissan & Pathfinder Armada SE & SUV & Asia & Front & 33840.0 & 30815.0 & 5.6 & 8.0 & 305.0 & 13.0 & 19.0 & 5013.0 & 123.0 & 207.0 \\\\\n396 & Nissan & Sentra 1.8 S 4dr & Sedan & Asia & Front & 14740.0 & 13747.0 & 1.8 & 4.0 & 126.0 & 28.0 & 35.0 & 2581.0 & 100.0 & 178.0 \\\\\n397 & Nissan & Maxima SE 4dr & Sedan & Asia & Front & 27490.0 & 25182.0 & 3.5 & 6.0 & 265.0 & 20.0 & 28.0 & 3473.0 & 111.0 & 194.0 \\\\\n398 & Nissan & 350Z coupe 2dr & Sports & Asia & Rear & 26910.0 & 25203.0 & 3.5 & 6.0 & 287.0 & 20.0 & 26.0 & 3188.0 & 104.0 & 169.0 \\\\\n399 & Nissan & Murano SL & Wagon & Asia & Rear & 28739.0 & 27300.0 & 3.5 & 6.0 & 245.0 & 20.0 & 25.0 & 3801.0 & 111.0 & 188.0 \\\\\n400 & Pontiac & Aztekt & SUV & USA & Front & 21595.0 & 19810.0 & 3.4 & 6.0 & 185.0 & 19.0 & 26.0 & 3779.0 & 108.0 & 182.0 \\\\\n401 & Pontiac & Sunfire 1SC 2dr & Sedan & USA & Front & 17735.0 & 16369.0 & 2.2 & 4.0 & 140.0 & 24.0 & 33.0 & 2771.0 & 104.0 & 182.0 \\\\\n402 & Pontiac & Montana EWB & Sedan & USA & All & 31370.0 & 28454.0 & 3.4 & 6.0 & 185.0 & 18.0 & 24.0 & 4431.0 & 121.0 & 201.0 \\\\\n403 & Porsche & 911 Carrera convertible 2dr (coupe) & Sports & Europe & Rear & 79165.0 & 69229.0 & 3.6 & 6.0 & 315.0 & 18.0 & 26.0 & 3135.0 & 93.0 & 175.0 \\\\\n404 & Porsche & Boxster convertible 2dr & Sports & Europe & Rear & 43365.0 & 37886.0 & 2.7 & 6.0 & 228.0 & 20.0 & 29.0 & 2811.0 & 95.0 & 170.0 \\\\\n405 & Saab & 9-5 Arc 4dr & Sedan & Europe & Front & 
35105.0 & 33011.0 & 2.3 & 4.0 & 220.0 & 21.0 & 29.0 & 3470.0 & 106.0 & 190.0 \\\\\n406 & Saab & 9-5 Aero & Wagon & Europe & Front & 40845.0 & 38376.0 & 2.3 & 4.0 & 250.0 & 19.0 & 29.0 & 3620.0 & 106.0 & 190.0 \\\\\n407 & Saturn & lon3 4dr & Sedan & USA & Front & 15825.0 & 14811.0 & 2.2 & 4.0 & 140.0 & 26.0 & 35.0 & 2692.0 & 103.0 & 185.0 \\\\\n408 & Saturn & L300 2 & Wagon & USA & Front & 23560.0 & 21779.0 & 2.2 & 4.0 & 140.0 & 24.0 & 34.0 & 3109.0 & 107.0 & 190.0 \\\\\n409 & Subaru & Legacy L 4dr & Sedan & Asia & All & 20445.0 & 18713.0 & 2.5 & 4.0 & 165.0 & 21.0 & 28.0 & 3285.0 & 104.0 & 184.0 \\\\\n410 & Subaru & Outback H-6 VDC 4dr & Sedan & Asia & All & 31545.0 & 28603.0 & 3.0 & 6.0 & 212.0 & 19.0 & 26.0 & 3630.0 & 104.0 & 184.0 \\\\\n411 & Subaru & Forester X & Wagon & Asia & All & 21445.0 & 19646.0 & 2.5 & 4.0 & 165.0 & 21.0 & 28.0 & 3090.0 & 99.0 & 175.0 \\\\\n412 & Suzuki & Aeno S 4dr & Sedan & Asia & Front & 12884.0 & 12719.0 & 2.3 & 4.0 & 155.0 & 25.0 & 31.0 & 2676.0 & 98.0 & 171.0 \\\\\n413 & Suzuki & Verona LX 4dr & Sedan & Asia & Front & 17262.0 & 17053.0 & 2.5 & 6.0 & 155.0 & 20.0 & 27.0 & 3380.0 & 106.0 & 188.0 \\\\\n414 & Toyota & 4Runner SR5 V6 & SUV & Asia & Front & 27710.0 & 24801.0 & 4.0 & 6.0 & 245.0 & 18.0 & 21.0 & 4035.0 & 110.0 & 189.0 \\\\\n415 & Toyota & Corolla CE 4dr & Sedan & Asia & Front & 14085.0 & 13065.0 & 1.8 & 4.0 & 130.0 & 32.0 & 40.0 & 2502.0 & 102.0 & 178.0 \\\\\n416 & Toyota & Echo 2dr auto & Sedan & Asia & Front & 11560.0 & 10896.0 & 1.5 & 4.0 & 108.0 & 33.0 & 39.0 & 2085.0 & 93.0 & 163.0 \\\\\n417 & Toyota & Camry Solara SE 2dr & Sedan & Asia & Front & 19635.0 & 17722.0 & 2.4 & 4.0 & 157.0 & 24.0 & 33.0 & 3175.0 & 107.0 & 193.0 \\\\\n418 & Toyota & Camry Solara SLE V6 2dr & Sedan & Asia & Front & 26510.0 & 23908.0 & 3.3 & 6.0 & 225.0 & 20.0 & 29.0 & 3439.0 & 107.0 & 193.0 \\\\\n419 & Toyota & Celica GT-S 2dr & Sports & Asia & Front & 22570.0 & 20363.0 & 1.8 & 4.0 & 180.0 & 24.0 & 33.0 & 2500.0 & 102.0 & 171.0 \\\\\n420 & 
Toyota & Tundra Access Cab V6 SR5 & Truck & Asia & All & 25935.0 & 23520.0 & 3.4 & 6.0 & 190.0 & 14.0 & 17.0 & 4435.0 & 128.0 & 218.0 \\\\\n421 & Volkswagen & GTI 1.8T 2dr hatch & Sedan & Europe & Front & 19825.0 & 18109.0 & 1.8 & 4.0 & 180.0 & 24.0 & 31.0 & 2934.0 & 99.0 & 168.0 \\\\\n422 & Volkswagen & New Beetle GLS convertible 2dr & Sedan & Europe & Front & 23215.0 & 21689.0 & 2.0 & 4.0 & 115.0 & 24.0 & 30.0 & 3082.0 & 99.0 & 161.0 \\\\\n423 & Volkswagen & Phaeton 4dr & Sedan & Europe & Front & 65000.0 & 59912.0 & 4.2 & 8.0 & 335.0 & 16.0 & 22.0 & 5194.0 & 118.0 & 204.0 \\\\\n424 & Volkswagen & Passat W8 & Wagon & Europe & Front & 40235.0 & 36956.0 & 4.0 & 8.0 & 270.0 & 18.0 & 25.0 & 4067.0 & 106.0 & 184.0 \\\\\n425 & Volvo & S60 T5 4dr & Sedan & Europe & Front & 34845.0 & 32902.0 & 2.3 & 5.0 & 247.0 & 20.0 & 28.0 & 3766.0 & 107.0 & 180.0 \\\\\n426 & Volvo & C70 LPT convertible 2dr & Sedan & Europe & Front & 40565.0 & 38203.0 & 2.4 & 5.0 & 197.0 & 21.0 & 28.0 & 3450.0 & 105.0 & 186.0 \\\\\n427 & Volvo & XC70 & Wagon & Europe & All & 35145.0 & 33112.0 & 2.5 & 5.0 & 208.0 & 20.0 & 27.0 & 3823.0 & 109.0 & 186.0 \\\\\n\\bottomrule\n\\end{tabular}\n\n" ] ], [ [ "There are many other **to_XXX** methods on the CASTable object, each of which corresponds to the same **to_XXX** method on Pandas DataFrames. The CASTable methods take the same arguments as the DataFrame counterparts, so you can [read the Pandas documentation for more information](http://pandas.pydata.org/pandas-docs/stable/api.html#id12).", "_____no_output_____" ] ], [ [ "conn.close()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
4aa29bb923839aaa0407202f30bc2e225dc0f619
237,140
ipynb
Jupyter Notebook
pandas-with-iris.ipynb
JohnADeady/Assignments-Programming-of-DA
04b4fe698122fbfb19e792641ace52147b4f282f
[ "Apache-2.0" ]
null
null
null
pandas-with-iris.ipynb
JohnADeady/Assignments-Programming-of-DA
04b4fe698122fbfb19e792641ace52147b4f282f
[ "Apache-2.0" ]
null
null
null
pandas-with-iris.ipynb
JohnADeady/Assignments-Programming-of-DA
04b4fe698122fbfb19e792641ace52147b4f282f
[ "Apache-2.0" ]
null
null
null
72.079027
139,992
0.689251
[ [ [ "# The Iris dataset and pandas\n\n![pandas logo](https://pandas.pydata.org/_static/pandas_logo.png)\n\n![Iris flowers](https://s3.amazonaws.com/assets.datacamp.com/blog_assets/Machine+Learning+R/iris-machinelearning.png)\n\n***\n\n**[Python Data Analysis Library](https://pandas.pydata.org/)**\n\n*[https://pandas.pydata.org/](https://pandas.pydata.org/)*\n\nThe pandas website.\n\n***\n\n**[Wes McKinney - 10-minute tour of pandas](https://vimeo.com/59324550)**\n\n*[https://vimeo.com/59324550](https://vimeo.com/59324550)*\n\nA 10 minutes video introduction to pandas.\n\n***\n\n**[Python for Data Analysis notebooks](https://github.com/wesm/pydata-book)**\n\n*[https://github.com/wesm/pydata-book](https://github.com/wesm/pydata-book)*\n\nMaterials and IPython notebooks for \"Python for Data Analysis\" by Wes McKinney, published by O'Reilly Media\n\n***\n\n**[10 Minutes to pandas](http://pandas.pydata.org/pandas-docs/stable/10min.html)**\n\n*[http://pandas.pydata.org/pandas-docs/stable/10min.html](http://pandas.pydata.org/pandas-docs/stable/10min.html)*\n\nOfficial pandas tutorial.\n\n***\n\n**[UC Irvine Machine Learning Repository: Iris Data Set](https://archive.ics.uci.edu/ml/datasets/iris)**\n\n*[https://archive.ics.uci.edu/ml/datasets/iris](https://archive.ics.uci.edu/ml/datasets/iris)*\n\nAbout the Iris data set from UC Irvine's machine learning repository.", "_____no_output_____" ], [ "## Loading data", "_____no_output_____" ] ], [ [ "# Import pandas.\nimport pandas as pd", "_____no_output_____" ], [ "# Load the iris data set from a URL.\ndf = pd.read_csv(\"https://raw.githubusercontent.com/uiuc-cse/data-fa14/gh-pages/data/iris.csv\")", "_____no_output_____" ], [ "df", "_____no_output_____" ] ], [ [ "***\n\n## Selecting rows and columns", "_____no_output_____" ] ], [ [ "df['species']", "_____no_output_____" ], [ "df[['petal_length', 'species']]", "_____no_output_____" ], [ "df[2:6]", "_____no_output_____" ], [ "df[['petal_length', 'species']][2:6]", 
"_____no_output_____" ], [ "df.loc[2:6]", "_____no_output_____" ], [ "df.loc[:, 'species']", "_____no_output_____" ], [ "df.loc[:, ['sepal_length', 'species']]", "_____no_output_____" ], [ "df.loc[2:6, ['sepal_length', 'species']]", "_____no_output_____" ], [ "df.iloc[2]", "_____no_output_____" ], [ "df.iloc[2:4, 1]", "_____no_output_____" ], [ "df.at[3, 'species']", "_____no_output_____" ], [ "df.iloc[1:10:2]", "_____no_output_____" ] ], [ [ "***\n\n## Boolean selects", "_____no_output_____" ] ], [ [ "df.loc[:, 'species'] == 'setosa'", "_____no_output_____" ], [ "df.loc[df.loc[:, 'species'] == 'versicolor']", "_____no_output_____" ], [ "x = df.loc[df.loc[:, 'species'] == 'versicolor']", "_____no_output_____" ], [ "x.loc[51]", "_____no_output_____" ], [ "x.iloc[1]", "_____no_output_____" ] ], [ [ "***\n\n## Summary statictics", "_____no_output_____" ] ], [ [ "df.head()", "_____no_output_____" ], [ "df.tail()", "_____no_output_____" ], [ "df.describe()", "_____no_output_____" ], [ "df.mean()", "_____no_output_____" ] ], [ [ "***\n\n## Plots", "_____no_output_____" ] ], [ [ "import seaborn as sns", "_____no_output_____" ], [ "sns.pairplot(df, hue='species')", "C:\\Users\\mclou\\Anaconda3\\lib\\site-packages\\scipy\\stats\\stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.\n return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval\n" ] ], [ [ "***\n\n## End", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ] ]
4aa29d6963e6612a3da39af60695d3b7f3eef032
26,543
ipynb
Jupyter Notebook
ArtificialData/JupyterNotebooks/RunPareto_SecondPrice.ipynb
AlfLobos/DSP
1e1073c6b0da562b0aea3dec9d62bc563a3b46f5
[ "CNRI-Python" ]
null
null
null
ArtificialData/JupyterNotebooks/RunPareto_SecondPrice.ipynb
AlfLobos/DSP
1e1073c6b0da562b0aea3dec9d62bc563a3b46f5
[ "CNRI-Python" ]
null
null
null
ArtificialData/JupyterNotebooks/RunPareto_SecondPrice.ipynb
AlfLobos/DSP
1e1073c6b0da562b0aea3dec9d62bc563a3b46f5
[ "CNRI-Python" ]
null
null
null
51.640078
115
0.629695
[ [ [ "import numpy as np\nimport time\nimport pickle\n## I import sys to kill the program if an option is not correct.\nimport sys\nimport os\nimport csv\n\nfrom RhoAndBeta import CalcRhoAndBetaVectors\nfrom UtilitiesOptimization import SubgrAlgSavPrimDualObjInd, \\\n SubgrAlgSavPrimDualObjFn_L2Ind\nfrom SimulationCode import ExpPareto\nfrom Utilities import CreateTableParetoInd, CreateTableParetoL2_L2Ind_Gr", "_____no_output_____" ], [ "## Read the data created in CreateDataJupNot\nsuffix='InstanceInfo/Ins1LongRun/'\nnum_impressions=pickle.load(open(suffix+'num_impressions'+'.p',\"rb\"))\nnumCampaigns=pickle.load(open(suffix+'numCampaigns'+'.p',\"rb\"))\nnum_edges=pickle.load(open(suffix+'num_edges'+'.p',\"rb\"))\nindex_Imps=pickle.load(open(suffix+'index_Imps'+'.p',\"rb\"))\nindex_sizeCamps=pickle.load(open(suffix+'index_sizeCamps'+'.p',\"rb\"))\nindex_startCamp=pickle.load(open(suffix+'index_startCamp'+'.p',\"rb\"))\nvector_maxbid=pickle.load(open(suffix+'vector_maxbid'+'.p',\"rb\"))\nvector_r=pickle.load(open(suffix+'vector_r'+'.p',\"rb\"))\nvector_s=(pickle.load(open(suffix+'vector_s'+'.p',\"rb\"))).astype(int)\next_s=pickle.load(open(suffix+'ext_s'+'.p',\"rb\"))\nadverPerImp=(pickle.load(open(suffix+'adverPerImp'+'.p',\"rb\"))).astype(int)\nUB_bidsPerImp = pickle.load(open(suffix+'UB_bids'+'.p',\"rb\"))\nvector_ctr=pickle.load(open(suffix+'vector_ctr'+'.p',\"rb\"))\nvector_rctr=pickle.load(open(suffix+'vector_rctr'+'.p',\"rb\"))\nprobImp=pickle.load(open(suffix+'probImp'+'.p',\"rb\"))", "_____no_output_____" ], [ "## In this Experiment we fix a budget to use. 
In this case we choose 100\nvector_m = np.ones(numCampaigns)*100", "_____no_output_____" ], [ "## If this parameter is true then first price auctions (\\beta_i(b) =b) are used, otherwise second price.\nfirstPrice = False\n## If this parameter is true a parameterSearch will be performd and .csv will be made,\nparameterSearch = False\n## We use $\\tau_k = 1/m_k$\nexpForTau=-1.0\ntau = np.power(vector_m,expForTau)\nUB_bids = UB_bidsPerImp[index_Imps]\n\nsuffix='ResultsPareto/'\ncurrent_directory = os.getcwd()\nresults_directory = os.path.join(current_directory, suffix)\nif not os.path.exists(results_directory):\n os.makedirs(results_directory)", "_____no_output_____" ] ], [ [ "# Parameter Search\n\nThis parameter search looks for a constant $C$, such that the dual method shows \nempirical convergence. Given that here we only look for training convergence, we don't \nneed to cross-validate or similar.", "_____no_output_____" ] ], [ [ "if parameterSearch:\n input_var = input(\"This will execute a simple parameter search.\\\n If you just wanted to run just a full long run do parameterSearch= False \\\n and kill this execution. 
To continue press enter.\")\n c=np.array([1.0,0.1,0.01,0.001,0.0001,0.00001,0.000001,0.0000001])\n p_grad_Type = 0\n num_it = 1000\n it_per_cal = 250\n init_lam=np.zeros((numCampaigns))\n alphas_pure=np.fromfunction(lambda i, j: (1/(np.sqrt(i + 1))), \\\n (num_it, 1), dtype=int)[:,0] \n \n# dualObjFn, primalObjFn, dualObjFnAvg, primalObjFnAvg, budget_used, \\\n# budget_LamAvgUse, dual_vars, dual_varsAvg = [], [], [], [], [], [], [], []\n\n nameResults='ParameterSearch'+'It_'+str(num_it)\n f = open(suffix+nameResults+'.csv', 'wt')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow( ('Auction Type', 'Problem Type', 'Ite', 'Cte', 'DualFnValue',\\\n 'PrimalValue','DualFnValueAvg','PrimalFnValueAvg') )\n \n print('Using First Price Auctions')\n firstPrice = True\n for c_val in c:\n print('c_val: ',c_val, end =', Methods: ')\n print('Indicator Case', end =' ,')\n p_grad_Type = 0\n alphas=c_val*alphas_pure\n [dual_FnValues,primal_GivenMu,budget_used,dual_vars,dual_AvgLamFnValues,\\\n primal_AvgLamGivenMu,budget_LamAvgUse,dual_varsAvg]= SubgrAlgSavPrimDualObjInd(\\\n init_lam, num_it, alphas, vector_r, vector_ctr, vector_rctr, vector_s, ext_s, \\\n vector_m, num_impressions, numCampaigns, num_edges, index_sizeCamps, index_Imps,\\\n UB_bids, firstPrice, adverPerImp, it_per_cal, p_grad_Type)\n numSaved=len(budget_used)\n for t in range(numSaved):\n writer.writerow(('First price', 'Indicator', (t+1)*it_per_cal,c_val,dual_FnValues[t],\\\n primal_GivenMu[t],dual_AvgLamFnValues[t],primal_AvgLamGivenMu[t]))\n \n print('L2 penalization wout indicator', end =' ,')\n p_grad_Type = 1\n alphas=c_val*alphas_pure\n [dual_FnValues,primal_GivenMu,budget_used,dual_vars,dual_AvgLamFnValues,\\\n primal_AvgLamGivenMu,budget_LamAvgUse,dual_varsAvg]= SubgrAlgSavPrimDualObjFn_L2Ind(\\\n init_lam, num_it, alphas, vector_r, vector_ctr, vector_rctr, vector_s, ext_s, vector_m,\\\n num_impressions, numCampaigns, num_edges, index_sizeCamps, index_Imps, UB_bids, firstPrice,\\\n 
adverPerImp, it_per_cal, p_grad_Type, tau, False)\n numSaved=len(budget_used)\n for t in range(numSaved):\n writer.writerow(('First price', 'L2 Wout Ind', (t+1)*it_per_cal,c_val,dual_FnValues[t],\\\n primal_GivenMu[t],dual_AvgLamFnValues[t],primal_AvgLamGivenMu[t]))\n \n print('L2 with indicator')\n p_grad_Type = 2\n alphas=c_val*alphas_pure\n [dual_FnValues,primal_GivenMu,budget_used,dual_vars,dual_AvgLamFnValues,\\\n primal_AvgLamGivenMu,budget_LamAvgUse,dual_varsAvg]= SubgrAlgSavPrimDualObjFn_L2Ind(\\\n init_lam, num_it, alphas, vector_r, vector_ctr, vector_rctr, vector_s, ext_s, vector_m,\\\n num_impressions, numCampaigns, num_edges, index_sizeCamps, index_Imps, UB_bids, firstPrice,\\\n adverPerImp, it_per_cal, p_grad_Type, tau, True)\n numSaved=len(budget_used)\n for t in range(numSaved):\n writer.writerow(('First price', 'L2 + Indicator', (t+1)*it_per_cal,c_val,dual_FnValues[t],\\\n primal_GivenMu[t],dual_AvgLamFnValues[t],primal_AvgLamGivenMu[t]))\n\n \n print('Using Second Price Auctions')\n firstPrice = False\n for c_val in c:\n print('c_val: ',c_val, end =', Methods: ')\n print('Indicator Case', end =' ,')\n p_grad_Type = 0\n alphas=c_val*alphas_pure\n [dual_FnValues,primal_GivenMu,budget_used,dual_vars,dual_AvgLamFnValues,\\\n primal_AvgLamGivenMu,budget_LamAvgUse,dual_varsAvg]= SubgrAlgSavPrimDualObjInd(\\\n init_lam, num_it, alphas, vector_r, vector_ctr, vector_rctr, vector_s, ext_s, \\\n vector_m, num_impressions, numCampaigns, num_edges, index_sizeCamps, index_Imps,\\\n UB_bids, firstPrice, adverPerImp, it_per_cal, p_grad_Type)\n numSaved=len(budget_used)\n for t in range(numSaved):\n writer.writerow(('Second price', 'Indicator', (t+1)*it_per_cal,c_val,dual_FnValues[t],\\\n primal_GivenMu[t],dual_AvgLamFnValues[t],primal_AvgLamGivenMu[t]))\n \n print('L2 penalization wout indicator', end =' ,')\n p_grad_Type = 1\n alphas=c_val*alphas_pure\n [dual_FnValues,primal_GivenMu,budget_used,dual_vars,dual_AvgLamFnValues,\\\n 
primal_AvgLamGivenMu,budget_LamAvgUse,dual_varsAvg]= SubgrAlgSavPrimDualObjFn_L2Ind(\\\n init_lam, num_it, alphas, vector_r, vector_ctr, vector_rctr, vector_s, ext_s, vector_m,\\\n num_impressions, numCampaigns, num_edges, index_sizeCamps, index_Imps, UB_bids, firstPrice,\\\n adverPerImp, it_per_cal, p_grad_Type, tau, False)\n numSaved=len(budget_used)\n for t in range(numSaved):\n writer.writerow(('Second price', 'L2 Wout Ind', (t+1)*it_per_cal,c_val,dual_FnValues[t],\\\n primal_GivenMu[t],dual_AvgLamFnValues[t],primal_AvgLamGivenMu[t]))\n \n print('L2 with indicator')\n p_grad_Type = 2\n alphas=c_val*alphas_pure\n [dual_FnValues,primal_GivenMu,budget_used,dual_vars,dual_AvgLamFnValues,\\\n primal_AvgLamGivenMu,budget_LamAvgUse,dual_varsAvg]= SubgrAlgSavPrimDualObjFn_L2Ind(\\\n init_lam, num_it, alphas, vector_r, vector_ctr, vector_rctr, vector_s, ext_s, vector_m,\\\n num_impressions, numCampaigns, num_edges, index_sizeCamps, index_Imps, UB_bids, firstPrice,\\\n adverPerImp, it_per_cal, p_grad_Type, tau, True)\n numSaved=len(budget_used)\n for t in range(numSaved):\n writer.writerow(('Second price', 'L2 + Indicator', (t+1)*it_per_cal,c_val,dual_FnValues[t],\\\n primal_GivenMu[t],dual_AvgLamFnValues[t],primal_AvgLamGivenMu[t]))\n f.close()", "_____no_output_____" ] ], [ [ "# The following value are obtained just by Looking at The Parameter Search .csv", "_____no_output_____" ] ], [ [ "## Best constants\nconsBestFP = [0.0001, 0.5, 0.5]\nconsBestSP = [0.0001, 0.5, 0.5]", "_____no_output_____" ] ], [ [ "### Run Pareto", "_____no_output_____" ] ], [ [ "num_itInd, num_itL2, num_itL2Ind = 10000, 10000, 10000\ninit_lam = np.zeros(numCampaigns)\n\nshuffle = False\nsim = 100\n\nnp.random.seed(12345)\nvecOfSeeds = np.random.randint(100000, size=sim)", "_____no_output_____" ], [ "multTP = [0.018, 0.020, 0.022, 0.025, 0.027, 0.030, 0.033, 0.037, 0.041, \\\n 0.045, 0.050, 0.055, 0.061, 0.067, 0.074, 0.082, 0.091, 0.100, 0.111, \\\n 0.122, 0.135, 0.150, 0.165, 0.183, 0.202, 
0.223, 0.247, 0.273, 0.301, \\\n 0.333, 0.368, 0.407, 0.449, 0.497, 0.549, 0.607, 0.670, 0.741, 0.819, \\\n 0.905, 1.000, 1.105, 1.221, 1.350, 1.492, 1.649, 1.822, 2.014, 2.226, \\\n 2.460]\n\nmultGr = [np.round(0.4+0.02*y, decimals = 2) for y in range(50)]\n\np_grad_TypeInd, p_grad_TypeL2, p_grad_TypeL2Ind = 0, 1, 2\n\nalphasInd, alphasL2, alphasL2Ind = 0, 0, 0\n\nif firstPrice:\n alphasInd = consBestFP[0] * np.array([np.sqrt(1.0/(i + 1)) for i in range(num_itInd)])\n alphasL2 = consBestFP[1] * np.array([np.sqrt(1.0/(i + 1)) for i in range(num_itL2)])\n alphasL2Ind = consBestFP[2] * np.array([np.sqrt(1.0/(i + 1)) for i in range(num_itL2Ind)])\nelse:\n alphasInd = consBestSP[0] * np.array([np.sqrt(1.0/(i + 1)) for i in range(num_itInd)])\n alphasL2 = consBestSP[1] * np.array([np.sqrt(1.0/(i + 1)) for i in range(num_itL2)])\n alphasL2Ind = consBestSP[2] * np.array([np.sqrt(1.0/(i + 1)) for i in range(num_itL2Ind)])\n", "_____no_output_____" ], [ "listToRetInd, dictToRetL2, dictToRetL2Ind, dictToRetGr = ExpPareto(numCampaigns, num_impressions,\\\n num_edges, index_Imps, index_sizeCamps, vector_s, vector_r, vector_m, vector_ctr, vector_rctr, \\\n UB_bidsPerImp, adverPerImp, alphasInd, num_itInd, alphasL2, num_itL2, alphasL2Ind, num_itL2Ind, \\\n p_grad_TypeInd, p_grad_TypeL2, p_grad_TypeL2Ind, multGr, multTP, init_lam, sim, firstPrice, \\\n vecOfSeeds = vecOfSeeds, shuffle = shuffle)", "We will first run the indicator case. \nFinishing running the Indicator part. 
It took: 505.6240670681 secs.\nGreedy multiplier: 0.4, L2/L2Ind multiplier: 0.018\nFinished running the Primal-Dual Algorithms, it took: 319.88830494880676 secs.\nRunning the simulations took 760.3936347961426 secs.\nGreedy multiplier: 0.42, L2/L2Ind multiplier: 0.02\nFinished running the Primal-Dual Algorithms, it took: 317.21908712387085 secs.\nRunning the simulations took 700.3821308612823 secs.\nGreedy multiplier: 0.44, L2/L2Ind multiplier: 0.022\nFinished running the Primal-Dual Algorithms, it took: 297.78847908973694 secs.\nRunning the simulations took 672.1948730945587 secs.\nGreedy multiplier: 0.46, L2/L2Ind multiplier: 0.025\nFinished running the Primal-Dual Algorithms, it took: 282.8054201602936 secs.\nRunning the simulations took 658.947655916214 secs.\nGreedy multiplier: 0.48, L2/L2Ind multiplier: 0.027\nFinished running the Primal-Dual Algorithms, it took: 280.05561900138855 secs.\nRunning the simulations took 659.825475692749 secs.\nGreedy multiplier: 0.5, L2/L2Ind multiplier: 0.03\nFinished running the Primal-Dual Algorithms, it took: 290.8958718776703 secs.\nRunning the simulations took 702.7375841140747 secs.\nGreedy multiplier: 0.52, L2/L2Ind multiplier: 0.033\nFinished running the Primal-Dual Algorithms, it took: 285.3815031051636 secs.\nRunning the simulations took 677.8423058986664 secs.\nGreedy multiplier: 0.54, L2/L2Ind multiplier: 0.037\nFinished running the Primal-Dual Algorithms, it took: 285.65761399269104 secs.\nRunning the simulations took 677.4114010334015 secs.\nGreedy multiplier: 0.56, L2/L2Ind multiplier: 0.041\nFinished running the Primal-Dual Algorithms, it took: 287.7937479019165 secs.\nRunning the simulations took 676.0073909759521 secs.\nGreedy multiplier: 0.58, L2/L2Ind multiplier: 0.045\nFinished running the Primal-Dual Algorithms, it took: 286.9465379714966 secs.\nRunning the simulations took 678.4446790218353 secs.\nGreedy multiplier: 0.6, L2/L2Ind multiplier: 0.05\nFinished running the Primal-Dual Algorithms, it took: 
285.44904088974 secs.\nRunning the simulations took 678.924430847168 secs.\nGreedy multiplier: 0.62, L2/L2Ind multiplier: 0.055\nFinished running the Primal-Dual Algorithms, it took: 285.42219614982605 secs.\nRunning the simulations took 679.832111120224 secs.\nGreedy multiplier: 0.64, L2/L2Ind multiplier: 0.061\nFinished running the Primal-Dual Algorithms, it took: 285.72619891166687 secs.\nRunning the simulations took 681.937490940094 secs.\nGreedy multiplier: 0.66, L2/L2Ind multiplier: 0.067\nFinished running the Primal-Dual Algorithms, it took: 285.108197927475 secs.\nRunning the simulations took 787.5432577133179 secs.\nGreedy multiplier: 0.68, L2/L2Ind multiplier: 0.074\nFinished running the Primal-Dual Algorithms, it took: 346.91245198249817 secs.\nRunning the simulations took 828.7998871803284 secs.\nGreedy multiplier: 0.7, L2/L2Ind multiplier: 0.082\nFinished running the Primal-Dual Algorithms, it took: 323.1621732711792 secs.\nRunning the simulations took 781.3599879741669 secs.\nGreedy multiplier: 0.72, L2/L2Ind multiplier: 0.091\nFinished running the Primal-Dual Algorithms, it took: 322.2443799972534 secs.\nRunning the simulations took 780.1058843135834 secs.\nGreedy multiplier: 0.74, L2/L2Ind multiplier: 0.1\nFinished running the Primal-Dual Algorithms, it took: 314.5276749134064 secs.\nRunning the simulations took 775.4380640983582 secs.\nGreedy multiplier: 0.76, L2/L2Ind multiplier: 0.111\nFinished running the Primal-Dual Algorithms, it took: 314.685852766037 secs.\nRunning the simulations took 831.7088341712952 secs.\nGreedy multiplier: 0.78, L2/L2Ind multiplier: 0.122\nFinished running the Primal-Dual Algorithms, it took: 284.1720917224884 secs.\nRunning the simulations took 689.3170881271362 secs.\nGreedy multiplier: 0.8, L2/L2Ind multiplier: 0.135\nFinished running the Primal-Dual Algorithms, it took: 284.4413869380951 secs.\nRunning the simulations took 692.0559139251709 secs.\nGreedy multiplier: 0.82, L2/L2Ind multiplier: 0.15\nFinished running 
the Primal-Dual Algorithms, it took: 285.79222893714905 secs.\nRunning the simulations took 694.6812379360199 secs.\nGreedy multiplier: 0.84, L2/L2Ind multiplier: 0.165\nFinished running the Primal-Dual Algorithms, it took: 283.6203532218933 secs.\nRunning the simulations took 693.0177626609802 secs.\nGreedy multiplier: 0.86, L2/L2Ind multiplier: 0.183\nFinished running the Primal-Dual Algorithms, it took: 284.46428894996643 secs.\nRunning the simulations took 694.7215039730072 secs.\nGreedy multiplier: 0.88, L2/L2Ind multiplier: 0.202\nFinished running the Primal-Dual Algorithms, it took: 284.646390914917 secs.\nRunning the simulations took 699.6355938911438 secs.\nGreedy multiplier: 0.9, L2/L2Ind multiplier: 0.223\nFinished running the Primal-Dual Algorithms, it took: 284.63265895843506 secs.\nRunning the simulations took 700.8694310188293 secs.\nGreedy multiplier: 0.92, L2/L2Ind multiplier: 0.247\nFinished running the Primal-Dual Algorithms, it took: 285.0807149410248 secs.\nRunning the simulations took 701.1935060024261 secs.\nGreedy multiplier: 0.94, L2/L2Ind multiplier: 0.273\nFinished running the Primal-Dual Algorithms, it took: 285.7025547027588 secs.\nRunning the simulations took 704.1729238033295 secs.\nGreedy multiplier: 0.96, L2/L2Ind multiplier: 0.301\nFinished running the Primal-Dual Algorithms, it took: 284.39767813682556 secs.\nRunning the simulations took 707.6761286258698 secs.\nGreedy multiplier: 0.98, L2/L2Ind multiplier: 0.333\nFinished running the Primal-Dual Algorithms, it took: 285.3755238056183 secs.\nRunning the simulations took 708.9910800457001 secs.\nGreedy multiplier: 1.0, L2/L2Ind multiplier: 0.368\nFinished running the Primal-Dual Algorithms, it took: 284.04268312454224 secs.\nRunning the simulations took 711.8717231750488 secs.\nGreedy multiplier: 1.02, L2/L2Ind multiplier: 0.407\nFinished running the Primal-Dual Algorithms, it took: 284.9955720901489 secs.\nRunning the simulations took 712.9529500007629 secs.\nGreedy multiplier: 
1.04, L2/L2Ind multiplier: 0.449\nFinished running the Primal-Dual Algorithms, it took: 283.3821141719818 secs.\nRunning the simulations took 713.8744041919708 secs.\nGreedy multiplier: 1.06, L2/L2Ind multiplier: 0.497\nFinished running the Primal-Dual Algorithms, it took: 285.1966321468353 secs.\nRunning the simulations took 716.5273337364197 secs.\nGreedy multiplier: 1.08, L2/L2Ind multiplier: 0.549\nFinished running the Primal-Dual Algorithms, it took: 283.55169892311096 secs.\nRunning the simulations took 716.9221308231354 secs.\nGreedy multiplier: 1.1, L2/L2Ind multiplier: 0.607\nFinished running the Primal-Dual Algorithms, it took: 283.96206998825073 secs.\nRunning the simulations took 720.8403007984161 secs.\nGreedy multiplier: 1.12, L2/L2Ind multiplier: 0.67\nFinished running the Primal-Dual Algorithms, it took: 284.5519919395447 secs.\nRunning the simulations took 721.2760670185089 secs.\nGreedy multiplier: 1.14, L2/L2Ind multiplier: 0.741\nFinished running the Primal-Dual Algorithms, it took: 284.1671781539917 secs.\nRunning the simulations took 723.0083332061768 secs.\nGreedy multiplier: 1.16, L2/L2Ind multiplier: 0.819\nFinished running the Primal-Dual Algorithms, it took: 285.06102895736694 secs.\nRunning the simulations took 724.425436258316 secs.\nGreedy multiplier: 1.18, L2/L2Ind multiplier: 0.905\nFinished running the Primal-Dual Algorithms, it took: 286.5007441043854 secs.\nRunning the simulations took 725.2979400157928 secs.\nGreedy multiplier: 1.2, L2/L2Ind multiplier: 1.0\nFinished running the Primal-Dual Algorithms, it took: 285.4062411785126 secs.\nRunning the simulations took 726.8104801177979 secs.\nGreedy multiplier: 1.22, L2/L2Ind multiplier: 1.105\nFinished running the Primal-Dual Algorithms, it took: 284.8896210193634 secs.\nRunning the simulations took 729.5862400531769 secs.\nGreedy multiplier: 1.24, L2/L2Ind multiplier: 1.221\nFinished running the Primal-Dual Algorithms, it took: 283.9998846054077 secs.\nRunning the simulations took 
730.681510925293 secs.\nGreedy multiplier: 1.26, L2/L2Ind multiplier: 1.35\nFinished running the Primal-Dual Algorithms, it took: 284.77463388442993 secs.\nRunning the simulations took 731.343080997467 secs.\nGreedy multiplier: 1.28, L2/L2Ind multiplier: 1.492\n" ], [ "from Utilities import CreateTableSensitivity\n\nnameToSaveSP = 'TableParetoLogSP.csv'\nnameToSaveIndSP = 'TableParetoIndLogSP.csv'\n\nTableParetoSP = CreateTableParetoL2_L2Ind_Gr(dictToRetL2, dictToRetL2Ind, dictToRetGr,\\\n vector_m, multTP, multGr, sim)\n\nTableJustIndSP = CreateTableParetoInd(listToRetInd, vector_m, sim, name = \"FP\")\n\n\nwith open(suffix+nameToSaveSP, 'w') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['multL2', 'multGr', 'sim',\\\n 'L2-Profit', 'L2-Revenue', 'L2-Cost', 'L2-BidsMade',\\\n 'L2-BidsWon', 'L2-ClicksWon', 'L2-%BudgetUsed',\\\n 'L2Ind-Profit', 'L2Ind-Revenue', 'L2Ind-Cost', 'L2Ind-BidsMade',\\\n 'L2Ind-BidsWon', 'L2Ind-ClicksWon', 'L2Ind-%BudgetUsed',\\\n 'Gr-Profit', 'Gr-Revenue', 'Gr-Cost', 'Gr-BidsMade',\\\n 'Gr-BidsWon', 'Gr-ClicksWon', 'Gr-%BudgetUsed'])\n [writer.writerow(r) for r in TableParetoSP]\n\nwith open(suffix+nameToSaveIndSP, 'w') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['sim', 'Ind-Profit', 'Ind-Revenue', 'Ind-Cost',\\\n 'Ind-BidsMade', 'Ind-BidsWon', 'Ind-ClicksWon', 'Ind-%BudgetUsed'])\n [writer.writerow(r) for r in TableJustIndSP]", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
4aa2a0f84c7096605a3942963e09952eb2d15741
19,516
ipynb
Jupyter Notebook
GridModel_GridImpact/PreProcessing/1_Create2019Easiur.ipynb
SiobhanPowell/speech-grid-impact
bc05849e660ba33cbcb5f53538ae40b3e91575d5
[ "BSD-2-Clause" ]
null
null
null
GridModel_GridImpact/PreProcessing/1_Create2019Easiur.ipynb
SiobhanPowell/speech-grid-impact
bc05849e660ba33cbcb5f53538ae40b3e91575d5
[ "BSD-2-Clause" ]
null
null
null
GridModel_GridImpact/PreProcessing/1_Create2019Easiur.ipynb
SiobhanPowell/speech-grid-impact
bc05849e660ba33cbcb5f53538ae40b3e91575d5
[ "BSD-2-Clause" ]
1
2021-12-17T17:10:58.000Z
2021-12-17T17:10:58.000Z
33.021997
178
0.408895
[ [ [ "import pandas as pd", "_____no_output_____" ], [ "plants = pd.read_excel('egrid2019_data.xlsx', 'PLNT'+'19', skiprows=[0])\ndf = pd.read_csv('egrid_2016_plant_easiur.csv')", "_____no_output_____" ] ], [ [ "# Fill in missing data", "_____no_output_____" ] ], [ [ "plants.loc[(plants['NERC']=='WECC')&(plants['LAT'].isna())]", "_____no_output_____" ], [ "# Fill in for three plants with missing location data, based on google maps investigation\n\nplants.loc[7964, 'LON'] = -104.169391; plants.loc[7964, 'LAT'] = 32.356716 # best guess at location, within eddy\nplants.loc[10868, 'LON'] = -106.432; plants.loc[10868, 'LAT'] = 31.9836 # location of nearby other plant\nplants.loc[11481, 'LAT'] = 47.923575; plants.loc[11481, 'LON'] = -122.102489 # industrial location in Snohomish\n\n", "_____no_output_____" ], [ "new_df = plants.loc[:, ['SEQPLT19', 'PSTATABB', 'PNAME', 'ORISPL', 'OPRNAME', 'OPRCODE',\n 'UTLSRVNM', 'UTLSRVID', 'BANAME', 'BACODE', 'NERC', 'SUBRGN', 'SRNAME',\n 'ISORTO', 'FIPSST', 'FIPSCNTY', 'CNTYNAME', 'LAT', 'LON']].copy(deep=True)", "_____no_output_____" ], [ "new_df = new_df.loc[(~(new_df['PSTATABB'].isin(['AK', 'PR', 'HI'])))&(~(new_df['LAT'].isna()))].reset_index(drop=True).copy(deep=True)", "_____no_output_____" ] ], [ [ "# Send this file to https://barney.ce.cmu.edu/~jinhyok/easiur/online/", "_____no_output_____" ] ], [ [ "new_df.loc[:, ['LON', 'LAT']].to_csv('easiur_input_file.csv', header=None, index=None)", "_____no_output_____" ], [ "new_df.loc[(new_df['LAT'].isna())]", "_____no_output_____" ] ], [ [ "# Receive this file back", "_____no_output_____" ] ], [ [ "output = pd.read_csv('easiur_easiurinputfile.csv')\noutput.head()", "_____no_output_____" ] ], [ [ "# Save in correct format for model", "_____no_output_____" ] ], [ [ "new_df = pd.concat((new_df, output.loc[:, output.columns[24:44]]), axis=1)", "_____no_output_____" ], [ "new_df.to_csv('egrid_2019_plant_easiur.csv', index=None)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa2bad8c88a5a21e16bf627f10a45cfd4f240df
10,635
ipynb
Jupyter Notebook
video-analysis/meanshift-camshift/README.ipynb
codearchive/opencv-python-tutorial
7fa1bd6f8804c753718d58d146dd14632d7cc334
[ "Apache-2.0" ]
null
null
null
video-analysis/meanshift-camshift/README.ipynb
codearchive/opencv-python-tutorial
7fa1bd6f8804c753718d58d146dd14632d7cc334
[ "Apache-2.0" ]
null
null
null
video-analysis/meanshift-camshift/README.ipynb
codearchive/opencv-python-tutorial
7fa1bd6f8804c753718d58d146dd14632d7cc334
[ "Apache-2.0" ]
1
2020-12-02T11:50:09.000Z
2020-12-02T11:50:09.000Z
46.644737
858
0.588246
[ [ [ "# Meanshift and Camshift \n\n_You can view [IPython Nootebook](README.ipynb) report._\n\n----\n\n## Contents\n\n- [GOAL](#GOAL)\n- [Meanshift](#Meanshift)\n - [Meanshift in OpenCV](#Meanshift-in-OpenCV)\n- [Camshift](#Camshift)\n - [Camshift in OpenCV](#Camshift-in-OpenCV)\n- [Additional Resources](#Additional-Resources)\n- [Exercises](#Exercises)\n\n\n## GOAL\n\nIn this chapter:\n\n- We will learn about Meanshift and Camshift algorithms to find and track objects in videos.\n\n## Meanshift\n\nThe intuition behind the meanshift is simple. Consider you have a set of points. (It can be a pixel distribution like histogram backprojection). You are given a small window ( may be a circle) and you have to move that window to the area of maximum pixel density (or maximum number of points). It is illustrated in the simple image given below:\n\n![meanshift-basics](../../data/meanshift-basics.jpg)\n\nThe initial window is shown in blue circle with the name \"C1\". Its original center is marked in blue rectangle, named \"C1_o\". But if you find the centroid of the points inside that window, you will get the point \"C1_r\" (marked in small blue circle) which is the real centroid of window. Surely they don't match. So move your window such that circle of the new window matches with previous centroid. Again find the new centroid. Most probably, it won't match. So move it again, and continue the iterations such that center of window and its centroid falls on the same location (or with a small desired error). So finally what you obtain is a window with maximum pixel distribution. It is marked with green circle, named \"C2\". As you can see in image, it has maximum number of points. The whole process is demonstrated on a static image below:\n\n![meanshift-face](../../data/meanshift-face.gif)\n\nSo we normally pass the histogram backprojected image and initial target location. When the object moves, obviously the movement is reflected in histogram backprojected image. 
As a result, meanshift algorithm moves our window to the new location with maximum density.\n\n### Meanshift in OpenCV\n\nTo use meanshift in OpenCV, first we need to setup the target, find its histogram so that we can backproject the target on each frame for calculation of meanshift. We also need to provide initial location of window. For histogram, only Hue is considered here. Also, to avoid false values due to low light, low light values are discarded using [cv.inRange()](https://docs.opencv.org/3.4.1/d2/de8/group__core__array.html#ga48af0ab51e36436c5d04340e036ce981) function.\n\n```python\nimport numpy as np\nimport cv2 as cv\n\ncap = cv.VideoCapture(\"../../data/slow.mp4\")\n# Take first frame of the video\nret, frame = cap.read()\n\n# Setup initial location of window\nr, h, c, w = 190, 20, 330, 60 # simply hardcoded the values\ntrack_window = (c, r, w, h)\n\n# Set up the ROI for tracking\nroi = frame[r:r+h, c:c+w]\nhsv_roi = cv.cvtColor(roi, cv.COLOR_BGR2HSV)\nmask = cv.inRange(hsv_roi, np.array((0., 60., 32.)),\n np.array((180., 255., 255.)))\nroi_hist = cv.calcHist([hsv_roi], [0], mask, [180], [0, 180])\ncv.normalize(roi_hist, roi_hist, 0, 255, cv.NORM_MINMAX)\n\n# Setup the termination criteria, either 10 iteration or move by at least 1 pt\nterm_crit = (cv.TERM_CRITERIA_EPS | cv.TERM_CRITERIA_COUNT, 10, 1)\n\n# Define the codec and create VideoWriter object\nfourcc = cv.VideoWriter_fourcc(*'XVID')\nwidth = int(cap.get(cv.CAP_PROP_FRAME_WIDTH))\nheight = int(cap.get(cv.CAP_PROP_FRAME_HEIGHT))\noutVideo = cv.VideoWriter(\"output-files/meanshift-res.avi\",\n fourcc, 25.0, (width, height), True)\n\n# Saved frame number\nframe_number = 0\n\nwhile True:\n ret, frame = cap.read()\n if ret is True:\n hsv = cv.cvtColor(frame, cv.COLOR_BGR2HSV)\n dst = cv.calcBackProject([hsv], [0], roi_hist, [0, 180], 1)\n\n # Apply meanshift to get the new location\n ret, track_window = cv.meanShift(dst, track_window, term_crit)\n\n # Draw it on image\n x, y, w, h = track_window\n 
img2 = cv.rectangle(frame, (x, y), (x+w, y+h), 255, 2)\n\n # Save the image and show it\n outVideo.write(img2)\n cv.imshow(\"img2\", img2)\n k = cv.waitKey(60) & 0xFF\n if k == 27: # Press \"esc\" to exit\n break\n elif k == 0x73: # Press \"s\" to save the current frame\n cv.imwrite(\"output-files/\" + \"meanshift-res-\" +\n str(frame_number) + \".png\", img2)\n frame_number += 1\n else:\n break\ncap.release()\noutVideo.release()\ncv.destroyAllWindows()\n```\n\nThree frames in a video I used is given below:\n\n![meanshift-result](output-files/meanshift-result.png)\n\n## Camshift\n\nDid you closely watch the last result? There is a problem. Our window always has the same size when car is farther away and it is very close to camera. That is not good. We need to adapt the window size with size and rotation of the target. Once again, the solution came from \"OpenCV Labs\" and it is called CAMshift (Continuously Adaptive Meanshift) published by Gary Bradsky in his paper \"Computer Vision Face Tracking for Use in a Perceptual User Interface\" in 1988.\n\nIt applies meanshift first. Once meanshift converges, it updates the size of the window as, $ s = 2 \\times \\sqrt{\\frac{M_{00}}{256}} $. It also calculates the orientation of best fitting ellipse to it. Again it applies the meanshift with new scaled search window and previous window location. The process is continued until required accuracy is met.\n\n![camshift-face](../../data/camshift-face.gif)\n\n### Camshift in OpenCV\n\nIt is almost same as meanshift, but it returns a rotated rectangle (that is our result) and box parameters (used to be passed as search window in next iteration). 
See the code below:\n\n```python\nimport numpy as np\nimport cv2 as cv\n\ncap = cv.VideoCapture(\"../../data/slow.mp4\")\n# Take first frame of the video\nret, frame = cap.read()\n\n# Setup initial location of window\nr, h, c, w = 190, 20, 330, 60 # simply hardcoded the values\ntrack_window = (c, r, w, h)\n\n# Set up the ROI for tracking\nroi = frame[r:r+h, c:c+w]\nhsv_roi = cv.cvtColor(roi, cv.COLOR_BGR2HSV)\nmask = cv.inRange(hsv_roi, np.array((0., 60., 32.)),\n np.array((180., 255., 255.)))\nroi_hist = cv.calcHist([hsv_roi], [0], mask, [180], [0, 180])\ncv.normalize(roi_hist, roi_hist, 0, 255, cv.NORM_MINMAX)\n\n# Setup the termination criteria, either 10 iteration or move by at least 1 pt\nterm_crit = (cv.TERM_CRITERIA_EPS | cv.TERM_CRITERIA_COUNT, 10, 1)\n\n# Define the codec and create VideoWriter object\nfourcc = cv.VideoWriter_fourcc(*'XVID')\nwidth = int(cap.get(cv.CAP_PROP_FRAME_WIDTH))\nheight = int(cap.get(cv.CAP_PROP_FRAME_HEIGHT))\noutVideo = cv.VideoWriter(\"output-files/camshift-res.avi\",\n fourcc, 25.0, (width, height), True)\n\n# Saved frame number\nframe_number = 0\n\nwhile True:\n ret, frame = cap.read()\n if ret is True:\n hsv = cv.cvtColor(frame, cv.COLOR_BGR2HSV)\n dst = cv.calcBackProject([hsv], [0], roi_hist, [0, 180], 1)\n\n # Apply camshift to get the new location\n ret, track_window = cv.CamShift(dst, track_window, term_crit)\n\n # Draw it on image\n pts = cv.boxPoints(ret)\n pts = np.int0(pts)\n img2 = cv.polylines(frame, [pts], True, (0, 0, 255), 2)\n\n # Save the image and show it\n outVideo.write(img2)\n cv.imshow('img2', img2)\n k = cv.waitKey(60) & 0xFF\n if k == 27: # Press \"esc\" to exit\n break\n elif k == 0x73: # Press \"s\" to save the current frame\n cv.imwrite(\"output-files/\" + \"camshift-res-\" +\n str(frame_number) + \".png\", img2)\n frame_number += 1\n else:\n break\ncap.release()\noutVideo.release()\ncv.destroyAllWindows()\n```\n\nThree frames of the result is shown 
below:\n\n![camshift-result](output-files/camshift-result.png)\n\n## Additional Resources \n\n1. French Wikipedia page on Camshift. (The two animations are taken from here)\n2. Bradski, G.R., \"Real time face and object tracking as a component of a perceptual user interface,\" Applications of Computer Vision, 1998. WACV '98. Proceedings., Fourth IEEE Workshop on , vol., no., pp.214,219, 19-21 Oct 1998\n\n## Exercises\n\n1. OpenCV comes with a Python sample on interactive demo of camshift. Use it, hack it, understand it. ", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown" ] ]
4aa2c653dfcf4ad08f4f660abfe4084daf84ebc9
50,679
ipynb
Jupyter Notebook
images_chroms/meta_visual.ipynb
priyappillai/rare_muts
ab8c02a842054908cdd0e02868089474ea7ffee7
[ "MIT" ]
null
null
null
images_chroms/meta_visual.ipynb
priyappillai/rare_muts
ab8c02a842054908cdd0e02868089474ea7ffee7
[ "MIT" ]
null
null
null
images_chroms/meta_visual.ipynb
priyappillai/rare_muts
ab8c02a842054908cdd0e02868089474ea7ffee7
[ "MIT" ]
null
null
null
405.432
16,860
0.943645
[ [ [ "import pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns", "_____no_output_____" ], [ "temp = pd.read_csv('meta.txt')", "_____no_output_____" ], [ "temp.columns", "_____no_output_____" ], [ "for i in temp.columns[1:]:\n plt.figure(figsize=(15,8))\n sns.barplot(temp['Chromosome #'], temp[i])\n #plt.show()\n plt.savefig(i+'.png')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code" ] ]
4aa2dd1bc3d28fcdea73563585f71a9dd5d539e3
7,205
ipynb
Jupyter Notebook
.ipynb_checkpoints/tester-checkpoint.ipynb
EderReisS/Avaliacao-Modulo-II-Let-s-Code
56eb2852bfa08217b4a591f23f795a63f153c41c
[ "MIT" ]
null
null
null
.ipynb_checkpoints/tester-checkpoint.ipynb
EderReisS/Avaliacao-Modulo-II-Let-s-Code
56eb2852bfa08217b4a591f23f795a63f153c41c
[ "MIT" ]
null
null
null
.ipynb_checkpoints/tester-checkpoint.ipynb
EderReisS/Avaliacao-Modulo-II-Let-s-Code
56eb2852bfa08217b4a591f23f795a63f153c41c
[ "MIT" ]
null
null
null
29.17004
454
0.52186
[ [ [ "<h1>Table of Contents<span class=\"tocSkip\"></span></h1>\n<div class=\"toc\"><ul class=\"toc-item\"></ul></div>", "_____no_output_____" ] ], [ [ "import pandas as pd", "_____no_output_____" ], [ "url_data=\"https://raw.githubusercontent.com/EderReisS/Python_Machine_Learning/main/input_Files/Credit.csv\"", "_____no_output_____" ], [ "df=pd.read_csv(url_data)", "_____no_output_____" ], [ "df.info()", "<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 1000 entries, 0 to 999\nData columns (total 21 columns):\n # Column Non-Null Count Dtype \n--- ------ -------------- ----- \n 0 checking_status 1000 non-null object\n 1 duration 1000 non-null int64 \n 2 credit_history 1000 non-null object\n 3 purpose 1000 non-null object\n 4 credit_amount 1000 non-null int64 \n 5 savings_status 1000 non-null object\n 6 employment 1000 non-null object\n 7 installment_commitment 1000 non-null int64 \n 8 personal_status 1000 non-null object\n 9 other_parties 1000 non-null object\n 10 residence_since 1000 non-null int64 \n 11 property_magnitude 1000 non-null object\n 12 age 1000 non-null int64 \n 13 other_payment_plans 1000 non-null object\n 14 housing 1000 non-null object\n 15 existing_credits 1000 non-null int64 \n 16 job 1000 non-null object\n 17 num_dependents 1000 non-null int64 \n 18 own_telephone 1000 non-null object\n 19 foreign_worker 1000 non-null object\n 20 class 1000 non-null object\ndtypes: int64(7), object(14)\nmemory usage: 164.2+ KB\n" ], [ "df.iloc[:,3].unique()", "_____no_output_____" ], [ "df.loc[df['class']=='good']", "_____no_output_____" ], [ "%%writefile api_credit.py\n\nfrom flask import Flask, request\nfrom flask_restful import Resource, Api\nimport pandas as pd\n\n\napp = Flask(__name__)\napi = Api(app)\n\nurl_data=\"https://raw.githubusercontent.com/EderReisS/Python_Machine_Learning/main/input_Files/Credit.csv\"\ndf_credit = pd.read_csv(url_data)\n\nclass credit_purpose(Resource):\n def get(self, purpose_):\n purpose_filter= df_credit['purpose']== 
purpose_\n credit_purpose = df_credit[purpose_filter]\n print(credit_purpose.to_json)\n return credit_purpose.to_json()\n \nclass purposes(Resource):\n def get(self):\n return {'purposes': ['radio/tv', 'education', 'furniture/equipment', 'new car',\n 'used car','business','domestic appliance', 'repairs',\n 'other', 'retraining']}\n \n\napi.add_resource(credit_purpose, '/<string:purpose_>')\napi.add_resource(purposes, '/')\n\nif __name__ == '__main__':\n app.run(debug=True)", "Overwriting api_credit.py\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
4aa2e7528c9c5ac705a308971959d041958858c2
158,781
ipynb
Jupyter Notebook
notebook/Modeling_C620.ipynb
skywalker0803r/c620
84e944f4ef09b9722672d0627bd90e63a5e32cac
[ "MIT" ]
null
null
null
notebook/Modeling_C620.ipynb
skywalker0803r/c620
84e944f4ef09b9722672d0627bd90e63a5e32cac
[ "MIT" ]
null
null
null
notebook/Modeling_C620.ipynb
skywalker0803r/c620
84e944f4ef09b9722672d0627bd90e63a5e32cac
[ "MIT" ]
1
2020-12-09T11:53:49.000Z
2020-12-09T11:53:49.000Z
52.593905
12,038
0.466964
[ [ [ "<a href=\"https://colab.research.google.com/github/skywalker0803r/c620/blob/main/notebook/Modeling_C620.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport joblib\n!pip install autorch > log.txt", "_____no_output_____" ], [ "c = joblib.load('/content/drive/MyDrive/台塑輕油案子/data/c620/col_names/c620_col_names.pkl')\nc620_df = pd.read_csv('/content/drive/MyDrive/台塑輕油案子/data/c620/cleaned/c620_train.csv',index_col=0).dropna(axis=0)\nprint(c620_df.shape)\nc620_df.head(3)", "(15552, 420)\n" ], [ "x_col = c['case']+c['x41']\nop_col = c['density']+c['yRefluxRate']+c['yHeatDuty']+c['yControl']\nsp_col = c['vent_gas_sf'] +c['distillate_sf'] +c['sidedraw_sf'] +c['bottoms_sf']\nwt_col = c['vent_gas_x'] +c['distillate_x'] +c['sidedraw_x'] +c['bottoms_x']\ny_col = sp_col + op_col\nn_idx = [ [i,i+41,i+41*2,i+41*3]for i in range(41)]\nfor idx in n_idx:\n assert np.allclose(c620_df[y_col].iloc[:,idx].sum(axis=1),1.0) # check\nprint(len(x_col),len(y_col))", "44 174\n" ], [ "bz_idx = y_col.index('Tatoray Stripper C620 Split Factors Calculation_Split Factor for Individual Component to Tatoray Stripper C620 Sidedraw_Benzene_Fraction')\nbz_idx", "_____no_output_____" ], [ "c620_df.loc[['001-002', '001-005', '001-008', '001-011', '001-014'],\n x_col+y_col].to_excel('/content/drive/MyDrive/台塑輕油案子/data/c620/Data_format_example/c620_data.xlsx')", "_____no_output_____" ] ], [ [ "# PartBuliderPlus", "_____no_output_____" ] ], [ [ "from autorch.utils import PartBulider\n\nclass PartBuliderPlus(PartBulider):\n def train_step(self):\n self.net.train()\n total_loss = 0\n for t,(x,y) in enumerate(self.train_iter):\n y_hat = self.net(x)\n bz_loss = (y_hat[:,bz_idx] - y[:,bz_idx])**2\n loss = self.loss_fn(y_hat,y) + 5*bz_loss\n loss = loss.mean()\n loss.backward()\n self.optimizer.step()\n self.optimizer.zero_grad()\n total_loss += 
loss.item()\n return total_loss/(t+1)\n \n def valid_step(self):\n self.net.eval()\n total_loss = 0\n for t,(x,y) in enumerate(self.vaild_iter):\n y_hat = self.net(x)\n bz_loss = (y_hat[:,bz_idx] - y[:,bz_idx])**2\n loss = self.loss_fn(y_hat,y) + 5*bz_loss\n loss = loss.mean()\n total_loss += loss.item()\n return total_loss/(t+1)\n\n#c620 = PartBuliderPlus(c620_df,x_col,y_col,normalize_idx_list=n_idx,limit_y_range=True)\nc620 = PartBulider(c620_df,x_col,y_col,normalize_idx_list=n_idx,limit_y_range=True)", "_____no_output_____" ], [ "c620.train()", " 0%| | 1/300 [00:00<04:56, 1.01it/s]" ], [ "c620.test(e=0.01)", "_____no_output_____" ], [ "from autorch.function import sp2wt\n\nx_test = c620.data['X_test']\nx41 = x_test[c['x41']].values\nsp = c620.predict(x_test).iloc[:,:41*4]\ns1,s2,s3,s4 = sp.iloc[:,:41].values,sp.iloc[:,41:41*2].values,sp.iloc[:,41*2:41*3].values,sp.iloc[:,41*3:41*4].values\nw1,w2,w3,w4 = sp2wt(x41,s1),sp2wt(x41,s2),sp2wt(x41,s3),sp2wt(x41,s4)\nwt_pred = np.hstack((w1,w2,w3,w4))\nwt_pred = pd.DataFrame(wt_pred,index=x_test.index,columns=wt_col)\nwt_pred.head(3)", "_____no_output_____" ], [ "wt_real = c620_df.loc[x_test.index,wt_col]\nwt_real.head(3)", "_____no_output_____" ], [ "res = c620.show_metrics(wt_real,wt_pred,e=0.01).fillna(0)\nres", "_____no_output_____" ] ], [ [ "# 有些分離係數欄位始終是常數 這裡調整一下看看", "_____no_output_____" ] ], [ [ "c620_wt_always_same_split_factor_dict = joblib.load('/content/drive/MyDrive/台塑輕油案子/data/c620/map_dict/c620_wt_always_same_split_factor_dict.pkl')\nfor i in c620_wt_always_same_split_factor_dict.keys():\n sp[i] = c620_wt_always_same_split_factor_dict[i]\ns1,s2,s3,s4 = sp.iloc[:,:41].values,sp.iloc[:,41:41*2].values,sp.iloc[:,41*2:41*3].values,sp.iloc[:,41*3:41*4].values\nw1,w2,w3,w4 = sp2wt(x41,s1),sp2wt(x41,s2),sp2wt(x41,s3),sp2wt(x41,s4)\nwt_pred = np.hstack((w1,w2,w3,w4))\nwt_pred = pd.DataFrame(wt_pred,index=x_test.index,columns=wt_col)\nwt_pred.head(3)", "_____no_output_____" ], [ "res = 
c620.show_metrics(wt_real,wt_pred,e=0.01).fillna(0)\nres", "_____no_output_____" ], [ "res.iloc[[bz_idx],:]", "_____no_output_____" ] ], [ [ "# 以下這兩個不能脫鉤", "_____no_output_____" ] ], [ [ "wt_pred.iloc[:,[89]].head()", "_____no_output_____" ], [ "c620.data['X_test'].iloc[:,[2]].head()", "_____no_output_____" ] ], [ [ "# 針對c620預測的wt_pred做後處理", "_____no_output_____" ] ], [ [ "def c620_wt_post_processing(wt_pred):\n def normalize(x):\n return x / x.sum(axis=1).reshape(-1,1)\n bz_idx = wt_pred.columns.tolist().index('Tatoray Stripper C620 Operation_Sidedraw Production Rate and Composition_Benzene_wt%')\n other_idx = [i for i in range(41*2,41*3,1) if i != bz_idx]\n other_total = (100 - wt_pred.iloc[:,bz_idx].values).reshape(-1,1)\n wt_pred.iloc[:,other_idx] = normalize(wt_pred.iloc[:,other_idx].values)*other_total\n wt_pred.iloc[:,bz_idx] = c620.data['X_test'].iloc[:,[2]].values\n return wt_pred\nwt_pred = c620_wt_post_processing(wt_pred)", "_____no_output_____" ] ], [ [ "# 再次確認", "_____no_output_____" ] ], [ [ "wt_pred.iloc[:,[89]].head()", "_____no_output_____" ], [ "c620.data['X_test'].iloc[:,[2]].head()", "_____no_output_____" ] ], [ [ "# 確認質量平衡", "_____no_output_____" ] ], [ [ "wt_pred.iloc[:,41*2:41*3].sum(axis=1)", "_____no_output_____" ] ], [ [ "# 確認準確度", "_____no_output_____" ] ], [ [ "c620.show_metrics(wt_real,wt_pred,e=0.01).fillna(0)", "_____no_output_____" ], [ "c620.shrink() # 節省空間", "_____no_output_____" ], [ "joblib.dump(c620,'/content/drive/MyDrive/台塑輕油案子/data/c620/model/c620.pkl')", "_____no_output_____" ], [ "", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
4aa2f71873a09287651ee25b7ec26307a556b2c0
69,233
ipynb
Jupyter Notebook
3_6_autoencoder/Part 3 - denoising-autoencoder/Denoising_Autoencoder_Exercise.ipynb
JasmineMou/udacity_DL
5e33a2aae8a062948cc17e147ec25de334f0ed41
[ "MIT" ]
null
null
null
3_6_autoencoder/Part 3 - denoising-autoencoder/Denoising_Autoencoder_Exercise.ipynb
JasmineMou/udacity_DL
5e33a2aae8a062948cc17e147ec25de334f0ed41
[ "MIT" ]
null
null
null
3_6_autoencoder/Part 3 - denoising-autoencoder/Denoising_Autoencoder_Exercise.ipynb
JasmineMou/udacity_DL
5e33a2aae8a062948cc17e147ec25de334f0ed41
[ "MIT" ]
null
null
null
158.427918
48,828
0.868228
[ [ [ "# Denoising Autoencoder\n\nSticking with the MNIST dataset, let's add noise to our data and see if we can define and train an autoencoder to _de_-noise the images.\n\n<img src='notebook_ims/autoencoder_denoise.png' width=70%/>\n\nLet's get started by importing our libraries and getting the dataset.", "_____no_output_____" ] ], [ [ "import torch\nimport numpy as np\nfrom torchvision import datasets\nimport torchvision.transforms as transforms\n\n# convert data to torch.FloatTensor\ntransform = transforms.ToTensor()\n\n# load the training and test datasets\ntrain_data = datasets.MNIST(root='data', train=True,\n download=True, transform=transform)\ntest_data = datasets.MNIST(root='data', train=False,\n download=True, transform=transform)\n\n# Create training and test dataloaders\nnum_workers = 0\n# how many samples per batch to load\nbatch_size = 20\n\n# prepare data loaders\ntrain_loader = torch.utils.data.DataLoader(train_data, batch_size=batch_size, num_workers=num_workers)\ntest_loader = torch.utils.data.DataLoader(test_data, batch_size=batch_size, num_workers=num_workers)", "_____no_output_____" ] ], [ [ "### Visualize the Data", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\n%matplotlib inline\n \n# obtain one batch of training images\ndataiter = iter(train_loader)\nimages, labels = dataiter.next()\nimages = images.numpy()\n\n# get one image from the batch\nimg = np.squeeze(images[0])\n\nfig = plt.figure(figsize = (5,5)) \nax = fig.add_subplot(111)\nax.imshow(img, cmap='gray')", "_____no_output_____" ] ], [ [ "---\n# Denoising\n\nAs I've mentioned before, autoencoders like the ones you've built so far aren't too useful in practive. However, they can be used to denoise images quite successfully just by training the network on noisy images. 
We can create the noisy images ourselves by adding Gaussian noise to the training images, then clipping the values to be between 0 and 1.\n\n>**We'll use noisy images as input and the original, clean images as targets.** \n\nBelow is an example of some of the noisy images I generated and the associated, denoised images.\n\n<img src='notebook_ims/denoising.png' />\n\n\nSince this is a harder problem for the network, we'll want to use _deeper_ convolutional layers here; layers with more feature maps. You might also consider adding additional layers. I suggest starting with a depth of 32 for the convolutional layers in the encoder, and the same depths going backward through the decoder.\n\n#### TODO: Build the network for the denoising autoencoder. Add deeper and/or additional layers compared to the model above.", "_____no_output_____" ] ], [ [ "train_on_gpu = torch.cuda.is_available()\nif not train_on_gpu:\n print(\"CUDA is not available. Train on CPU ...\")\nelse:\n print(\"CUDA is available. Train on GPU ...\")", "CUDA is available. 
Train on GPU ...\n" ], [ "import torch.nn as nn\nimport torch.nn.functional as F\n\n# define the NN architecture\nclass ConvDenoiser(nn.Module):\n def __init__(self):\n super(ConvDenoiser, self).__init__()\n ## encoder layers ##\n self.encoder1 = nn.Conv2d(1,32,3,padding=1)\n self.encoder2 = nn.Conv2d(32,16,3,padding=1)\n self.encoder3 = nn.Conv2d(16,8,3,padding=1)\n \n self.maxpool = nn.MaxPool2d(2,2) # reduce x-y dims by 2\n \n ## decoder layers ##\n ## a kernel of 2 and a stride of 2 will increase the spatial dims by 2\n self.decoder1 = nn.ConvTranspose2d(8,8,3,stride=2) \n self.decoder2 = nn.ConvTranspose2d(8,16,2,stride=2)\n self.decoder3 = nn.ConvTranspose2d(16,32,2,stride=2)\n \n self.final = nn.Conv2d(32,1,3,padding=1) # final normal conv layer to decrease depth\n\n\n def forward(self, x):\n ## encode ##\n x = F.relu(self.encoder1(x))\n x = self.maxpool(x)\n x = F.relu(self.encoder2(x))\n x = self.maxpool(x)\n x = F.relu(self.encoder3(x))\n x = self.maxpool(x)\n \n ## decode ##\n x = F.relu(self.decoder1(x))\n x = F.relu(self.decoder2(x))\n x = F.relu(self.decoder3(x))\n x = F.sigmoid(self.final(x))\n \n return x\n\n# initialize the NN\nmodel = ConvDenoiser()\nif train_on_gpu:\n model.cuda()\n \nprint(model)", "ConvDenoiser(\n (encoder1): Conv2d(1, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (encoder2): Conv2d(32, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (encoder3): Conv2d(16, 8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (maxpool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n (decoder1): ConvTranspose2d(8, 8, kernel_size=(3, 3), stride=(2, 2))\n (decoder2): ConvTranspose2d(8, 16, kernel_size=(2, 2), stride=(2, 2))\n (decoder3): ConvTranspose2d(16, 32, kernel_size=(2, 2), stride=(2, 2))\n (final): Conv2d(32, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n)\n" ] ], [ [ "---\n## Training\n\nWe are only concerned with the training images, which we can get from the 
`train_loader`.\n\n>In this case, we are actually **adding some noise** to these images and we'll feed these `noisy_imgs` to our model. The model will produce reconstructed images based on the noisy input. But, we want it to produce _normal_ un-noisy images, and so, when we calculate the loss, we will still compare the reconstructed outputs to the original images!\n\nBecause we're comparing pixel values in input and output images, it will be best to use a loss that is meant for a regression task. Regression is all about comparing quantities rather than probabilistic values. So, in this case, I'll use `MSELoss`. And compare output images and input images as follows:\n```\nloss = criterion(outputs, images)\n```", "_____no_output_____" ] ], [ [ "# for adding noise to images\nnoise_factor=0.5\n\ndef train_model(trainloader, model, criterion, optimizer):\n # number of epochs to train the model\n n_epochs = 20 \n\n for epoch in range(1, n_epochs+1):\n # monitor training loss\n train_loss = 0.0\n\n ###################\n # train the model #\n ###################\n # _ stands in for labels, here\n # no need to flatten images\n for (images,_) in train_loader:\n ## add random noise to the input images\n noisy_imgs = images + noise_factor * torch.randn(*images.shape)\n # Clip the images to be between 0 and 1\n noisy_imgs = np.clip(noisy_imgs, 0., 1.)\n\n if train_on_gpu:\n images = images.cuda()\n noisy_imgs = noisy_imgs.cuda() \n \n # clear the gradients of all optimized variables\n optimizer.zero_grad()\n ## forward pass: compute predicted outputs by passing *noisy* images to the model\n outputs = model(noisy_imgs)\n # calculate the loss\n # the \"target\" is still the original, not-noisy images\n loss = criterion(outputs, images)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n # perform a single optimization step (parameter update)\n optimizer.step()\n # update running training loss\n train_loss += 
loss.item()*images.size(0)\n\n # print avg training statistics \n train_loss = train_loss/len(train_loader)\n print('Epoch: {} \\tTraining Loss: {:.6f}'.format(epoch, train_loss))", "_____no_output_____" ], [ "# specify loss function\ncriterion = nn.MSELoss()\n\n# specify loss function\noptimizer = torch.optim.Adam(model.parameters(), lr=0.001)", "_____no_output_____" ], [ "train_model(train_loader, model, criterion, optimizer)", "Epoch: 1 \tTraining Loss: 1.027003\nEpoch: 2 \tTraining Loss: 0.778870\nEpoch: 3 \tTraining Loss: 0.727460\nEpoch: 4 \tTraining Loss: 0.703074\nEpoch: 5 \tTraining Loss: 0.688385\nEpoch: 6 \tTraining Loss: 0.675194\nEpoch: 7 \tTraining Loss: 0.664477\nEpoch: 8 \tTraining Loss: 0.655550\nEpoch: 9 \tTraining Loss: 0.648978\nEpoch: 10 \tTraining Loss: 0.644065\nEpoch: 11 \tTraining Loss: 0.639112\nEpoch: 12 \tTraining Loss: 0.635416\nEpoch: 13 \tTraining Loss: 0.632685\nEpoch: 14 \tTraining Loss: 0.630055\nEpoch: 15 \tTraining Loss: 0.626158\nEpoch: 16 \tTraining Loss: 0.625067\nEpoch: 17 \tTraining Loss: 0.622354\nEpoch: 18 \tTraining Loss: 0.620321\nEpoch: 19 \tTraining Loss: 0.618755\nEpoch: 20 \tTraining Loss: 0.616563\n" ] ], [ [ "## Checking out the results\n\nHere I'm adding noise to the test images and passing them through the autoencoder. 
It does a suprising great job of removing the noise, even though it's sometimes difficult to tell what the original number is.", "_____no_output_____" ] ], [ [ "def test(model, images): \n # add noise to the test images\n noisy_imgs = images + noise_factor * torch.randn(*images.shape)\n noisy_imgs = np.clip(noisy_imgs, 0., 1.)\n \n if train_on_gpu:\n images = images.cuda()\n noisy_imgs = noisy_imgs.cuda()\n \n # get sample outputs\n output = model(noisy_imgs) \n\n # prep images for display\n noisy_imgs = noisy_imgs.numpy() if not train_on_gpu else noisy_imgs.cpu().numpy() \n \n # output is resized into a batch of iages\n output = output.view(batch_size, 1, 28, 28)\n # use detach when it's an output that requires_grad\n output = output.detach().numpy() if not train_on_gpu else output.detach().cpu().numpy()\n\n # plot the first ten input images and then reconstructed images\n fig, axes = plt.subplots(nrows=2, ncols=10, sharex=True, sharey=True, figsize=(25,4))\n\n # input images on top row, reconstructions on bottom\n for noisy_imgs, row in zip([noisy_imgs, output], axes):\n for img, ax in zip(noisy_imgs, row):\n ax.imshow(np.squeeze(img), cmap='gray')\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)", "_____no_output_____" ], [ "# obtain one batch of test images\ndataiter = iter(test_loader)\nimages, labels = dataiter.next()", "_____no_output_____" ], [ "test(model, images)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
4aa32357325fd4fb3313af355f37ae419b6cc800
360,770
ipynb
Jupyter Notebook
.ipynb_checkpoints/map storms-checkpoint.ipynb
cgentemann/storm_heat_content
6132f9cb34e3b3ac46fa834f98c58c93fb69f8e2
[ "Apache-2.0" ]
null
null
null
.ipynb_checkpoints/map storms-checkpoint.ipynb
cgentemann/storm_heat_content
6132f9cb34e3b3ac46fa834f98c58c93fb69f8e2
[ "Apache-2.0" ]
null
null
null
.ipynb_checkpoints/map storms-checkpoint.ipynb
cgentemann/storm_heat_content
6132f9cb34e3b3ac46fa834f98c58c93fb69f8e2
[ "Apache-2.0" ]
1
2020-12-13T02:36:29.000Z
2020-12-13T02:36:29.000Z
273.310606
251,612
0.910855
[ [ [ "import os\nimport time\nimport datetime as dt\nimport xarray as xr\nfrom datetime import datetime\nimport pandas\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport math\nimport geopy.distance\nfrom math import sin, pi\nfrom scipy import interpolate\nfrom scipy import stats\n\n#functions for running storm data\nimport sys\n\n####################you will need to change some paths here!#####################\n#list of input directories\ndir_storm_info='f:/data/tc_wakes/database/info/'\ndir_out='f:/data/tc_wakes/database/sst/'\n\n", "_____no_output_____" ], [ "#for iyr_storm in range(2002,2018):\ninit = 0\nfor iyr_storm in range(2002,2018):\n for inum_storm in range(0,110): \n filename = dir_out + str(iyr_storm) + '/' + str(inum_storm).zfill(3) + '_interpolated_track.nc'\n exists = os.path.isfile(filename)\n if not exists:\n continue\n print(filename)\n ds_storm_info=xr.open_dataset(filename)\n ds_storm_info = ds_storm_info.sel(j2=0)\n ds_storm_info.close()\n ds_storm_info['sindex']=ds_storm_info.pres\n ds_storm_info['sindex']=iyr_storm+inum_storm/120\n# if abs(ds_storm_info.lon[-1]-ds_storm_info.lon[0])>180:\n# ds_storm_info['lon'] = np.mod(ds_storm_info['lon'], 360)\n if init==0:\n ds_all = ds_storm_info\n init=1\n ds_all = xr.concat([ds_all, ds_storm_info], dim='i2')\n", 
"f:/data/tc_wakes/database/sst/2002/001_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/002_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/005_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/007_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/009_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/010_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/011_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/012_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/013_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/014_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/015_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/017_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/018_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/019_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/020_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/021_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/022_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/023_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/025_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/027_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/028_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/030_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/031_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/033_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/034_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/035_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/036_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/037_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/039_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/040_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/041_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/042_interpolated_track.nc\nf:/data/tc_wake
s/database/sst/2002/043_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/046_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/047_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/048_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/049_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/050_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/051_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/052_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/053_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/054_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/055_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/056_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/058_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/060_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/061_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/063_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/064_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/066_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/067_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/068_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/069_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/070_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/072_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/075_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/077_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/081_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/082_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/083_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2002/084_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/002_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/003_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/004_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2
003/005_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/006_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/008_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/009_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/010_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/011_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/012_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/013_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/015_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/016_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/017_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/018_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/019_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/021_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/022_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/023_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/024_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/025_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/026_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/027_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/028_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/031_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/032_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/033_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/034_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/035_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/036_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/037_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/038_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/040_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/042_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/044_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/046_interpol
ated_track.nc\nf:/data/tc_wakes/database/sst/2003/047_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/048_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/049_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/050_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/051_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/055_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/057_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/059_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/060_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/064_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/065_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/066_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/067_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/070_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/071_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/072_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/073_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/075_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/076_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/077_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/078_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/079_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/080_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/081_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/082_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/083_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/086_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/087_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/088_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/089_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/091_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/092_interpolated_track.nc\nf
:/data/tc_wakes/database/sst/2003/093_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/094_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/095_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2003/097_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2004/001_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2004/002_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2004/003_interpolated_track.nc\nf:/data/tc_wakes/database/sst/2004/004_interpolated_track.nc\n" ], [ "import cartopy.crs as ccrs\nimport matplotlib.pyplot as plt\nfrom cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter\nimport matplotlib as mpl\ndir_figs = 'f:/data/tc_wakes/database/figs/hist/'\nfig = plt.figure(figsize=(14,6))\nax = plt.axes(projection=ccrs.PlateCarree())\nax.coastlines()\n#ax.scatter(ds_all.lon,ds_all.lat,c=ds_all.sindex,s=.01)\nax.scatter(ds_all.lon,ds_all.lat,cmap=mpl.cm.jet,c=ds_all.wind,s=.01,vmin=0,vmax=100)\nax.set_xticks([-180, -120, -60, 0, 60, 120, 180], crs=ccrs.PlateCarree())\nax.set_yticks([-90, -60, -30, 0, 30, 60, 90], crs=ccrs.PlateCarree())\nlon_formatter = LongitudeFormatter(zero_direction_label=True)\nlat_formatter = LatitudeFormatter()\nax.xaxis.set_major_formatter(lon_formatter)\nax.yaxis.set_major_formatter(lat_formatter)\nsm = plt.cm.ScalarMappable(cmap=mpl.cm.jet,norm=plt.Normalize(0,100))\nsm._A = []\ncb = plt.colorbar(sm,ax=ax)\ncb.ax.set_ylabel('wind speed (ms$^{-1}$)') \nplt.savefig(dir_figs+'map_storms.png')", "_____no_output_____" ], [ "subset = ds_all.where(ds_all.wind>1)\nplt.hist(subset.wind,bins=np.arange(0,150,10))\nplt.xlabel('wind speed (ms$^{-1}$)')\nplt.ylabel('number of observations')\nplt.savefig(dir_figs+'hist_database_windspeed.png')", "_____no_output_____" ], [ "subset = ds_all.where(ds_all.wind>1)\nplt.scatter(subset.storm_speed_kmhr,subset.wind)\nplt.xlabel('wind speed (m s$^{-1}$)')\nplt.ylabel('translation speed (km hr$^{-1}$)')\nplt.savefig(dir_figs+'hist_database_translation.png')", 
"_____no_output_____" ], [ "ds_all.wind", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
4aa3265c5708aa688cce6be48e9142ce2d7b2434
10,488
ipynb
Jupyter Notebook
Chapter04-Part-1/App/Data/data.ipynb
atkinsonbg/packt-hands-on-machine-learning-with-mlnet
98e3106bcf59555d2d8c3fc1e5e44018dbb2d823
[ "MIT" ]
null
null
null
Chapter04-Part-1/App/Data/data.ipynb
atkinsonbg/packt-hands-on-machine-learning-with-mlnet
98e3106bcf59555d2d8c3fc1e5e44018dbb2d823
[ "MIT" ]
null
null
null
Chapter04-Part-1/App/Data/data.ipynb
atkinsonbg/packt-hands-on-machine-learning-with-mlnet
98e3106bcf59555d2d8c3fc1e5e44018dbb2d823
[ "MIT" ]
null
null
null
49.942857
145
0.40532
[ [ [ "import pandas as pd\nfrom sklearn.utils import shuffle", "_____no_output_____" ], [ "# load book examples and confirm the shape\ndf_vehicles = pd.read_csv('./vehicles.csv', sep=',', header=0)\ndf_vehicles.head", "_____no_output_____" ], [ "df_vehicles_select = df_vehicles[['region','price','year','manufacturer','model','condition','cylinders']].copy().reset_index(drop=True)\ndf_vehicles_select.dropna(inplace=True)\ndf_vehicles_select = shuffle(df_vehicles_select)\ndf_vehicles_select.head", "_____no_output_____" ], [ "df_all.to_csv(\"all_data.csv\", sep='\\t', index=False, header=False)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code" ] ]
4aa32deb877b537cd26b001a3c2397fdfc36f7c5
513,508
ipynb
Jupyter Notebook
Model backlog/EfficientNet/EfficientNetB5/243 - EfficientNetB5-Reg-Img256 Old Pretrain.ipynb
ThinkBricks/APTOS2019BlindnessDetection
e524fd69f83a1252710076c78b6a5236849cd885
[ "MIT" ]
23
2019-09-08T17:19:16.000Z
2022-02-02T16:20:09.000Z
Model backlog/EfficientNet/EfficientNetB5/243 - EfficientNetB5-Reg-Img256 Old Pretrain.ipynb
ThinkBricks/APTOS2019BlindnessDetection
e524fd69f83a1252710076c78b6a5236849cd885
[ "MIT" ]
1
2020-03-10T18:42:12.000Z
2020-09-18T22:02:38.000Z
Model backlog/EfficientNet/EfficientNetB5/243 - EfficientNetB5-Reg-Img256 Old Pretrain.ipynb
ThinkBricks/APTOS2019BlindnessDetection
e524fd69f83a1252710076c78b6a5236849cd885
[ "MIT" ]
16
2019-09-21T12:29:59.000Z
2022-03-21T00:42:26.000Z
138.41186
91,192
0.74845
[ [ [ "## Dependencies", "_____no_output_____" ] ], [ [ "import os\nimport sys\nimport cv2\nimport shutil\nimport random\nimport warnings\nimport numpy as np\nimport pandas as pd\nimport seaborn as sns\nimport multiprocessing as mp\nimport matplotlib.pyplot as plt\nfrom tensorflow import set_random_seed\nfrom sklearn.utils import class_weight\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import confusion_matrix, cohen_kappa_score\nfrom keras import backend as K\nfrom keras.models import Model\nfrom keras.utils import to_categorical\nfrom keras import optimizers, applications\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.layers import Dense, Dropout, GlobalAveragePooling2D, Input\nfrom keras.callbacks import EarlyStopping, ReduceLROnPlateau, Callback, LearningRateScheduler\n\ndef seed_everything(seed=0):\n random.seed(seed)\n os.environ['PYTHONHASHSEED'] = str(seed)\n np.random.seed(seed)\n set_random_seed(0)\n\nseed = 0\nseed_everything(seed)\n%matplotlib inline\nsns.set(style=\"whitegrid\")\nwarnings.filterwarnings(\"ignore\")\nsys.path.append(os.path.abspath('../input/efficientnet/efficientnet-master/efficientnet-master/'))\nfrom efficientnet import *", "/opt/conda/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:516: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:517: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:518: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future 
version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:519: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:520: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:525: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:541: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:542: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:543: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint16 = np.dtype([(\"qint16\", np.int16, 
1)])\n/opt/conda/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:544: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:545: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n/opt/conda/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:550: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\nUsing TensorFlow backend.\n" ] ], [ [ "## Load data", "_____no_output_____" ] ], [ [ "hold_out_set = pd.read_csv('../input/aptos-data-split/hold-out.csv')\nX_train = hold_out_set[hold_out_set['set'] == 'train']\nX_val = hold_out_set[hold_out_set['set'] == 'validation']\ntest = pd.read_csv('../input/aptos2019-blindness-detection/test.csv')\nprint('Number of train samples: ', X_train.shape[0])\nprint('Number of validation samples: ', X_val.shape[0])\nprint('Number of test samples: ', test.shape[0])\n\n# Preprocecss data\nX_train[\"id_code\"] = X_train[\"id_code\"].apply(lambda x: x + \".png\")\nX_val[\"id_code\"] = X_val[\"id_code\"].apply(lambda x: x + \".png\")\ntest[\"id_code\"] = test[\"id_code\"].apply(lambda x: x + \".png\")\ndisplay(X_train.head())", "Number of train samples: 2929\nNumber of validation samples: 733\nNumber of test samples: 1928\n" ] ], [ [ "# Model parameters", "_____no_output_____" ] ], [ [ "# Model parameters\nFACTOR = 2\nBATCH_SIZE = 8 * FACTOR\nEPOCHS = 10\nWARMUP_EPOCHS = 3\nLEARNING_RATE = 1e-4 * FACTOR\nWARMUP_LEARNING_RATE = 
1e-3 * FACTOR\nHEIGHT = 256\nWIDTH = 256\nCHANNELS = 3\nTTA_STEPS = 5\nES_PATIENCE = 5\nRLROP_PATIENCE = 3\nDECAY_DROP = 0.5\nLR_WARMUP_EPOCHS_1st = 2\nLR_WARMUP_EPOCHS_2nd = 3\nSTEP_SIZE = len(X_train) // BATCH_SIZE\nTOTAL_STEPS_1st = WARMUP_EPOCHS * STEP_SIZE\nTOTAL_STEPS_2nd = EPOCHS * STEP_SIZE\nWARMUP_STEPS_1st = LR_WARMUP_EPOCHS_1st * STEP_SIZE\nWARMUP_STEPS_2nd = LR_WARMUP_EPOCHS_2nd * STEP_SIZE", "_____no_output_____" ] ], [ [ "# Pre-procecess images", "_____no_output_____" ] ], [ [ "train_base_path = '../input/aptos2019-blindness-detection/train_images/'\ntest_base_path = '../input/aptos2019-blindness-detection/test_images/'\ntrain_dest_path = 'base_dir/train_images/'\nvalidation_dest_path = 'base_dir/validation_images/'\ntest_dest_path = 'base_dir/test_images/'\n\n# Making sure directories don't exist\nif os.path.exists(train_dest_path):\n shutil.rmtree(train_dest_path)\nif os.path.exists(validation_dest_path):\n shutil.rmtree(validation_dest_path)\nif os.path.exists(test_dest_path):\n shutil.rmtree(test_dest_path)\n \n# Creating train, validation and test directories\nos.makedirs(train_dest_path)\nos.makedirs(validation_dest_path)\nos.makedirs(test_dest_path)\n\ndef crop_image(img, tol=7):\n if img.ndim ==2:\n mask = img>tol\n return img[np.ix_(mask.any(1),mask.any(0))]\n elif img.ndim==3:\n gray_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)\n mask = gray_img>tol\n check_shape = img[:,:,0][np.ix_(mask.any(1),mask.any(0))].shape[0]\n if (check_shape == 0): # image is too dark so that we crop out everything,\n return img # return original image\n else:\n img1=img[:,:,0][np.ix_(mask.any(1),mask.any(0))]\n img2=img[:,:,1][np.ix_(mask.any(1),mask.any(0))]\n img3=img[:,:,2][np.ix_(mask.any(1),mask.any(0))]\n img = np.stack([img1,img2,img3],axis=-1)\n \n return img\n\ndef circle_crop(img):\n img = crop_image(img)\n\n height, width, depth = img.shape\n largest_side = np.max((height, width))\n img = cv2.resize(img, (largest_side, largest_side))\n\n height, width, 
depth = img.shape\n\n x = width//2\n y = height//2\n r = np.amin((x, y))\n\n circle_img = np.zeros((height, width), np.uint8)\n cv2.circle(circle_img, (x, y), int(r), 1, thickness=-1)\n img = cv2.bitwise_and(img, img, mask=circle_img)\n img = crop_image(img)\n\n return img\n \ndef preprocess_image(base_path, save_path, image_id, HEIGHT, WIDTH, sigmaX=10):\n image = cv2.imread(base_path + image_id)\n image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n image = circle_crop(image)\n image = cv2.resize(image, (HEIGHT, WIDTH))\n image = cv2.addWeighted(image, 4, cv2.GaussianBlur(image, (0,0), sigmaX), -4 , 128)\n cv2.imwrite(save_path + image_id, image)\n \n# Pre-procecss train set\nfor i, image_id in enumerate(X_train['id_code']):\n preprocess_image(train_base_path, train_dest_path, image_id, HEIGHT, WIDTH)\n \n# Pre-procecss validation set\nfor i, image_id in enumerate(X_val['id_code']):\n preprocess_image(train_base_path, validation_dest_path, image_id, HEIGHT, WIDTH)\n \n# Pre-procecss test set\nfor i, image_id in enumerate(test['id_code']):\n preprocess_image(test_base_path, test_dest_path, image_id, HEIGHT, WIDTH)", "_____no_output_____" ] ], [ [ "# Data generator", "_____no_output_____" ] ], [ [ "datagen=ImageDataGenerator(rescale=1./255, \n rotation_range=360,\n horizontal_flip=True,\n vertical_flip=True)\n\ntrain_generator=datagen.flow_from_dataframe(\n dataframe=X_train,\n directory=train_dest_path,\n x_col=\"id_code\",\n y_col=\"diagnosis\",\n class_mode=\"raw\",\n batch_size=BATCH_SIZE,\n target_size=(HEIGHT, WIDTH),\n seed=seed)\n\nvalid_generator=datagen.flow_from_dataframe(\n dataframe=X_val,\n directory=validation_dest_path,\n x_col=\"id_code\",\n y_col=\"diagnosis\",\n class_mode=\"raw\",\n batch_size=BATCH_SIZE,\n target_size=(HEIGHT, WIDTH),\n seed=seed)\n\ntest_generator=datagen.flow_from_dataframe( \n dataframe=test,\n directory=test_dest_path,\n x_col=\"id_code\",\n batch_size=1,\n class_mode=None,\n shuffle=False,\n target_size=(HEIGHT, WIDTH),\n 
seed=seed)", "Found 2929 validated image filenames.\nFound 733 validated image filenames.\nFound 1928 validated image filenames.\n" ], [ "def cosine_decay_with_warmup(global_step,\n learning_rate_base,\n total_steps,\n warmup_learning_rate=0.0,\n warmup_steps=0,\n hold_base_rate_steps=0):\n \"\"\"\n Cosine decay schedule with warm up period.\n In this schedule, the learning rate grows linearly from warmup_learning_rate\n to learning_rate_base for warmup_steps, then transitions to a cosine decay\n schedule.\n :param global_step {int}: global step.\n :param learning_rate_base {float}: base learning rate.\n :param total_steps {int}: total number of training steps.\n :param warmup_learning_rate {float}: initial learning rate for warm up. (default: {0.0}).\n :param warmup_steps {int}: number of warmup steps. (default: {0}).\n :param hold_base_rate_steps {int}: Optional number of steps to hold base learning rate before decaying. (default: {0}).\n :param global_step {int}: global step.\n :Returns : a float representing learning rate.\n :Raises ValueError: if warmup_learning_rate is larger than learning_rate_base, or if warmup_steps is larger than total_steps.\n \"\"\"\n\n if total_steps < warmup_steps:\n raise ValueError('total_steps must be larger or equal to warmup_steps.')\n learning_rate = 0.5 * learning_rate_base * (1 + np.cos(\n np.pi *\n (global_step - warmup_steps - hold_base_rate_steps\n ) / float(total_steps - warmup_steps - hold_base_rate_steps)))\n if hold_base_rate_steps > 0:\n learning_rate = np.where(global_step > warmup_steps + hold_base_rate_steps,\n learning_rate, learning_rate_base)\n if warmup_steps > 0:\n if learning_rate_base < warmup_learning_rate:\n raise ValueError('learning_rate_base must be larger or equal to warmup_learning_rate.')\n slope = (learning_rate_base - warmup_learning_rate) / warmup_steps\n warmup_rate = slope * global_step + warmup_learning_rate\n learning_rate = np.where(global_step < warmup_steps, warmup_rate,\n learning_rate)\n 
return np.where(global_step > total_steps, 0.0, learning_rate)\n\n\nclass WarmUpCosineDecayScheduler(Callback):\n \"\"\"Cosine decay with warmup learning rate scheduler\"\"\"\n\n def __init__(self,\n learning_rate_base,\n total_steps,\n global_step_init=0,\n warmup_learning_rate=0.0,\n warmup_steps=0,\n hold_base_rate_steps=0,\n verbose=0):\n \"\"\"\n Constructor for cosine decay with warmup learning rate scheduler.\n :param learning_rate_base {float}: base learning rate.\n :param total_steps {int}: total number of training steps.\n :param global_step_init {int}: initial global step, e.g. from previous checkpoint.\n :param warmup_learning_rate {float}: initial learning rate for warm up. (default: {0.0}).\n :param warmup_steps {int}: number of warmup steps. (default: {0}).\n :param hold_base_rate_steps {int}: Optional number of steps to hold base learning rate before decaying. (default: {0}).\n :param verbose {int}: quiet, 1: update messages. (default: {0}).\n \"\"\"\n\n super(WarmUpCosineDecayScheduler, self).__init__()\n self.learning_rate_base = learning_rate_base\n self.total_steps = total_steps\n self.global_step = global_step_init\n self.warmup_learning_rate = warmup_learning_rate\n self.warmup_steps = warmup_steps\n self.hold_base_rate_steps = hold_base_rate_steps\n self.verbose = verbose\n self.learning_rates = []\n\n def on_batch_end(self, batch, logs=None):\n self.global_step = self.global_step + 1\n lr = K.get_value(self.model.optimizer.lr)\n self.learning_rates.append(lr)\n\n def on_batch_begin(self, batch, logs=None):\n lr = cosine_decay_with_warmup(global_step=self.global_step,\n learning_rate_base=self.learning_rate_base,\n total_steps=self.total_steps,\n warmup_learning_rate=self.warmup_learning_rate,\n warmup_steps=self.warmup_steps,\n hold_base_rate_steps=self.hold_base_rate_steps)\n K.set_value(self.model.optimizer.lr, lr)\n if self.verbose > 0:\n print('\\nBatch %02d: setting learning rate to %s.' 
% (self.global_step + 1, lr))", "_____no_output_____" ] ], [ [ "# Model", "_____no_output_____" ] ], [ [ "def create_model(input_shape):\n input_tensor = Input(shape=input_shape)\n base_model = EfficientNetB5(weights=None, \n include_top=False,\n input_tensor=input_tensor)\n# base_model.load_weights('../input/efficientnet-keras-weights-b0b5/efficientnet-b5_imagenet_1000_notop.h5')\n\n x = GlobalAveragePooling2D()(base_model.output)\n final_output = Dense(1, activation='linear', name='final_output')(x)\n model = Model(input_tensor, final_output)\n \n model.load_weights('../input/aptos-pretrain-olddata-effnetb5/effNetB5_img224_oldData.h5')\n \n return model", "_____no_output_____" ] ], [ [ "# Train top layers", "_____no_output_____" ] ], [ [ "model = create_model(input_shape=(HEIGHT, WIDTH, CHANNELS))\n\nfor layer in model.layers:\n layer.trainable = False\n\nfor i in range(-2, 0):\n model.layers[i].trainable = True\n\ncosine_lr_1st = WarmUpCosineDecayScheduler(learning_rate_base=WARMUP_LEARNING_RATE,\n total_steps=TOTAL_STEPS_1st,\n warmup_learning_rate=0.0,\n warmup_steps=WARMUP_STEPS_1st,\n hold_base_rate_steps=(2 * STEP_SIZE))\n\nmetric_list = [\"accuracy\"]\ncallback_list = [cosine_lr_1st]\noptimizer = optimizers.Adam(lr=WARMUP_LEARNING_RATE)\nmodel.compile(optimizer=optimizer, loss='mean_squared_error', metrics=metric_list)\nmodel.summary()", "__________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n==================================================================================================\ninput_1 (InputLayer) (None, 256, 256, 3) 0 \n__________________________________________________________________________________________________\nconv2d_1 (Conv2D) (None, 128, 128, 48) 1296 input_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_1 (BatchNor (None, 128, 128, 48) 192 conv2d_1[0][0] 
\n__________________________________________________________________________________________________\nswish_1 (Swish) (None, 128, 128, 48) 0 batch_normalization_1[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_1 (DepthwiseCo (None, 128, 128, 48) 432 swish_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_2 (BatchNor (None, 128, 128, 48) 192 depthwise_conv2d_1[0][0] \n__________________________________________________________________________________________________\nswish_2 (Swish) (None, 128, 128, 48) 0 batch_normalization_2[0][0] \n__________________________________________________________________________________________________\nlambda_1 (Lambda) (None, 1, 1, 48) 0 swish_2[0][0] \n__________________________________________________________________________________________________\nconv2d_2 (Conv2D) (None, 1, 1, 12) 588 lambda_1[0][0] \n__________________________________________________________________________________________________\nswish_3 (Swish) (None, 1, 1, 12) 0 conv2d_2[0][0] \n__________________________________________________________________________________________________\nconv2d_3 (Conv2D) (None, 1, 1, 48) 624 swish_3[0][0] \n__________________________________________________________________________________________________\nactivation_1 (Activation) (None, 1, 1, 48) 0 conv2d_3[0][0] \n__________________________________________________________________________________________________\nmultiply_1 (Multiply) (None, 128, 128, 48) 0 activation_1[0][0] \n swish_2[0][0] \n__________________________________________________________________________________________________\nconv2d_4 (Conv2D) (None, 128, 128, 24) 1152 multiply_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_3 (BatchNor (None, 128, 128, 24) 96 conv2d_4[0][0] 
\n__________________________________________________________________________________________________\ndepthwise_conv2d_2 (DepthwiseCo (None, 128, 128, 24) 216 batch_normalization_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_4 (BatchNor (None, 128, 128, 24) 96 depthwise_conv2d_2[0][0] \n__________________________________________________________________________________________________\nswish_4 (Swish) (None, 128, 128, 24) 0 batch_normalization_4[0][0] \n__________________________________________________________________________________________________\nlambda_2 (Lambda) (None, 1, 1, 24) 0 swish_4[0][0] \n__________________________________________________________________________________________________\nconv2d_5 (Conv2D) (None, 1, 1, 6) 150 lambda_2[0][0] \n__________________________________________________________________________________________________\nswish_5 (Swish) (None, 1, 1, 6) 0 conv2d_5[0][0] \n__________________________________________________________________________________________________\nconv2d_6 (Conv2D) (None, 1, 1, 24) 168 swish_5[0][0] \n__________________________________________________________________________________________________\nactivation_2 (Activation) (None, 1, 1, 24) 0 conv2d_6[0][0] \n__________________________________________________________________________________________________\nmultiply_2 (Multiply) (None, 128, 128, 24) 0 activation_2[0][0] \n swish_4[0][0] \n__________________________________________________________________________________________________\nconv2d_7 (Conv2D) (None, 128, 128, 24) 576 multiply_2[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_5 (BatchNor (None, 128, 128, 24) 96 conv2d_7[0][0] \n__________________________________________________________________________________________________\ndrop_connect_1 (DropConnect) (None, 128, 128, 24) 0 
batch_normalization_5[0][0] \n__________________________________________________________________________________________________\nadd_1 (Add) (None, 128, 128, 24) 0 drop_connect_1[0][0] \n batch_normalization_3[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_3 (DepthwiseCo (None, 128, 128, 24) 216 add_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_6 (BatchNor (None, 128, 128, 24) 96 depthwise_conv2d_3[0][0] \n__________________________________________________________________________________________________\nswish_6 (Swish) (None, 128, 128, 24) 0 batch_normalization_6[0][0] \n__________________________________________________________________________________________________\nlambda_3 (Lambda) (None, 1, 1, 24) 0 swish_6[0][0] \n__________________________________________________________________________________________________\nconv2d_8 (Conv2D) (None, 1, 1, 6) 150 lambda_3[0][0] \n__________________________________________________________________________________________________\nswish_7 (Swish) (None, 1, 1, 6) 0 conv2d_8[0][0] \n__________________________________________________________________________________________________\nconv2d_9 (Conv2D) (None, 1, 1, 24) 168 swish_7[0][0] \n__________________________________________________________________________________________________\nactivation_3 (Activation) (None, 1, 1, 24) 0 conv2d_9[0][0] \n__________________________________________________________________________________________________\nmultiply_3 (Multiply) (None, 128, 128, 24) 0 activation_3[0][0] \n swish_6[0][0] \n__________________________________________________________________________________________________\nconv2d_10 (Conv2D) (None, 128, 128, 24) 576 multiply_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_7 (BatchNor 
(None, 128, 128, 24) 96 conv2d_10[0][0] \n__________________________________________________________________________________________________\ndrop_connect_2 (DropConnect) (None, 128, 128, 24) 0 batch_normalization_7[0][0] \n__________________________________________________________________________________________________\nadd_2 (Add) (None, 128, 128, 24) 0 drop_connect_2[0][0] \n add_1[0][0] \n__________________________________________________________________________________________________\nconv2d_11 (Conv2D) (None, 128, 128, 144 3456 add_2[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_8 (BatchNor (None, 128, 128, 144 576 conv2d_11[0][0] \n__________________________________________________________________________________________________\nswish_8 (Swish) (None, 128, 128, 144 0 batch_normalization_8[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_4 (DepthwiseCo (None, 64, 64, 144) 1296 swish_8[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_9 (BatchNor (None, 64, 64, 144) 576 depthwise_conv2d_4[0][0] \n__________________________________________________________________________________________________\nswish_9 (Swish) (None, 64, 64, 144) 0 batch_normalization_9[0][0] \n__________________________________________________________________________________________________\nlambda_4 (Lambda) (None, 1, 1, 144) 0 swish_9[0][0] \n__________________________________________________________________________________________________\nconv2d_12 (Conv2D) (None, 1, 1, 6) 870 lambda_4[0][0] \n__________________________________________________________________________________________________\nswish_10 (Swish) (None, 1, 1, 6) 0 conv2d_12[0][0] \n__________________________________________________________________________________________________\nconv2d_13 
(Conv2D) (None, 1, 1, 144) 1008 swish_10[0][0] \n__________________________________________________________________________________________________\nactivation_4 (Activation) (None, 1, 1, 144) 0 conv2d_13[0][0] \n__________________________________________________________________________________________________\nmultiply_4 (Multiply) (None, 64, 64, 144) 0 activation_4[0][0] \n swish_9[0][0] \n__________________________________________________________________________________________________\nconv2d_14 (Conv2D) (None, 64, 64, 40) 5760 multiply_4[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_10 (BatchNo (None, 64, 64, 40) 160 conv2d_14[0][0] \n__________________________________________________________________________________________________\nconv2d_15 (Conv2D) (None, 64, 64, 240) 9600 batch_normalization_10[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_11 (BatchNo (None, 64, 64, 240) 960 conv2d_15[0][0] \n__________________________________________________________________________________________________\nswish_11 (Swish) (None, 64, 64, 240) 0 batch_normalization_11[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_5 (DepthwiseCo (None, 64, 64, 240) 2160 swish_11[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_12 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_5[0][0] \n__________________________________________________________________________________________________\nswish_12 (Swish) (None, 64, 64, 240) 0 batch_normalization_12[0][0] \n__________________________________________________________________________________________________\nlambda_5 (Lambda) (None, 1, 1, 240) 0 swish_12[0][0] 
\n__________________________________________________________________________________________________\nconv2d_16 (Conv2D) (None, 1, 1, 10) 2410 lambda_5[0][0] \n__________________________________________________________________________________________________\nswish_13 (Swish) (None, 1, 1, 10) 0 conv2d_16[0][0] \n__________________________________________________________________________________________________\nconv2d_17 (Conv2D) (None, 1, 1, 240) 2640 swish_13[0][0] \n__________________________________________________________________________________________________\nactivation_5 (Activation) (None, 1, 1, 240) 0 conv2d_17[0][0] \n__________________________________________________________________________________________________\nmultiply_5 (Multiply) (None, 64, 64, 240) 0 activation_5[0][0] \n swish_12[0][0] \n__________________________________________________________________________________________________\nconv2d_18 (Conv2D) (None, 64, 64, 40) 9600 multiply_5[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_13 (BatchNo (None, 64, 64, 40) 160 conv2d_18[0][0] \n__________________________________________________________________________________________________\ndrop_connect_3 (DropConnect) (None, 64, 64, 40) 0 batch_normalization_13[0][0] \n__________________________________________________________________________________________________\nadd_3 (Add) (None, 64, 64, 40) 0 drop_connect_3[0][0] \n batch_normalization_10[0][0] \n__________________________________________________________________________________________________\nconv2d_19 (Conv2D) (None, 64, 64, 240) 9600 add_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_14 (BatchNo (None, 64, 64, 240) 960 conv2d_19[0][0] \n__________________________________________________________________________________________________\nswish_14 (Swish) (None, 64, 64, 240) 0 
batch_normalization_14[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_6 (DepthwiseCo (None, 64, 64, 240) 2160 swish_14[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_15 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_6[0][0] \n__________________________________________________________________________________________________\nswish_15 (Swish) (None, 64, 64, 240) 0 batch_normalization_15[0][0] \n__________________________________________________________________________________________________\nlambda_6 (Lambda) (None, 1, 1, 240) 0 swish_15[0][0] \n__________________________________________________________________________________________________\nconv2d_20 (Conv2D) (None, 1, 1, 10) 2410 lambda_6[0][0] \n__________________________________________________________________________________________________\nswish_16 (Swish) (None, 1, 1, 10) 0 conv2d_20[0][0] \n__________________________________________________________________________________________________\nconv2d_21 (Conv2D) (None, 1, 1, 240) 2640 swish_16[0][0] \n__________________________________________________________________________________________________\nactivation_6 (Activation) (None, 1, 1, 240) 0 conv2d_21[0][0] \n__________________________________________________________________________________________________\nmultiply_6 (Multiply) (None, 64, 64, 240) 0 activation_6[0][0] \n swish_15[0][0] \n__________________________________________________________________________________________________\nconv2d_22 (Conv2D) (None, 64, 64, 40) 9600 multiply_6[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_16 (BatchNo (None, 64, 64, 40) 160 conv2d_22[0][0] \n__________________________________________________________________________________________________\ndrop_connect_4 (DropConnect) 
(None, 64, 64, 40) 0 batch_normalization_16[0][0] \n__________________________________________________________________________________________________\nadd_4 (Add) (None, 64, 64, 40) 0 drop_connect_4[0][0] \n add_3[0][0] \n__________________________________________________________________________________________________\nconv2d_23 (Conv2D) (None, 64, 64, 240) 9600 add_4[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_17 (BatchNo (None, 64, 64, 240) 960 conv2d_23[0][0] \n__________________________________________________________________________________________________\nswish_17 (Swish) (None, 64, 64, 240) 0 batch_normalization_17[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_7 (DepthwiseCo (None, 64, 64, 240) 2160 swish_17[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_18 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_7[0][0] \n__________________________________________________________________________________________________\nswish_18 (Swish) (None, 64, 64, 240) 0 batch_normalization_18[0][0] \n__________________________________________________________________________________________________\nlambda_7 (Lambda) (None, 1, 1, 240) 0 swish_18[0][0] \n__________________________________________________________________________________________________\nconv2d_24 (Conv2D) (None, 1, 1, 10) 2410 lambda_7[0][0] \n__________________________________________________________________________________________________\nswish_19 (Swish) (None, 1, 1, 10) 0 conv2d_24[0][0] \n__________________________________________________________________________________________________\nconv2d_25 (Conv2D) (None, 1, 1, 240) 2640 swish_19[0][0] \n__________________________________________________________________________________________________\nactivation_7 
(Activation) (None, 1, 1, 240) 0 conv2d_25[0][0] \n__________________________________________________________________________________________________\nmultiply_7 (Multiply) (None, 64, 64, 240) 0 activation_7[0][0] \n swish_18[0][0] \n__________________________________________________________________________________________________\nconv2d_26 (Conv2D) (None, 64, 64, 40) 9600 multiply_7[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_19 (BatchNo (None, 64, 64, 40) 160 conv2d_26[0][0] \n__________________________________________________________________________________________________\ndrop_connect_5 (DropConnect) (None, 64, 64, 40) 0 batch_normalization_19[0][0] \n__________________________________________________________________________________________________\nadd_5 (Add) (None, 64, 64, 40) 0 drop_connect_5[0][0] \n add_4[0][0] \n__________________________________________________________________________________________________\nconv2d_27 (Conv2D) (None, 64, 64, 240) 9600 add_5[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_20 (BatchNo (None, 64, 64, 240) 960 conv2d_27[0][0] \n__________________________________________________________________________________________________\nswish_20 (Swish) (None, 64, 64, 240) 0 batch_normalization_20[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_8 (DepthwiseCo (None, 64, 64, 240) 2160 swish_20[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_21 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_8[0][0] \n__________________________________________________________________________________________________\nswish_21 (Swish) (None, 64, 64, 240) 0 batch_normalization_21[0][0] 
\n__________________________________________________________________________________________________\nlambda_8 (Lambda) (None, 1, 1, 240) 0 swish_21[0][0] \n__________________________________________________________________________________________________\nconv2d_28 (Conv2D) (None, 1, 1, 10) 2410 lambda_8[0][0] \n__________________________________________________________________________________________________\nswish_22 (Swish) (None, 1, 1, 10) 0 conv2d_28[0][0] \n__________________________________________________________________________________________________\nconv2d_29 (Conv2D) (None, 1, 1, 240) 2640 swish_22[0][0] \n__________________________________________________________________________________________________\nactivation_8 (Activation) (None, 1, 1, 240) 0 conv2d_29[0][0] \n__________________________________________________________________________________________________\nmultiply_8 (Multiply) (None, 64, 64, 240) 0 activation_8[0][0] \n swish_21[0][0] \n__________________________________________________________________________________________________\nconv2d_30 (Conv2D) (None, 64, 64, 40) 9600 multiply_8[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_22 (BatchNo (None, 64, 64, 40) 160 conv2d_30[0][0] \n__________________________________________________________________________________________________\ndrop_connect_6 (DropConnect) (None, 64, 64, 40) 0 batch_normalization_22[0][0] \n__________________________________________________________________________________________________\nadd_6 (Add) (None, 64, 64, 40) 0 drop_connect_6[0][0] \n add_5[0][0] \n__________________________________________________________________________________________________\nconv2d_31 (Conv2D) (None, 64, 64, 240) 9600 add_6[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_23 (BatchNo (None, 64, 64, 240) 960 conv2d_31[0][0] 
\n__________________________________________________________________________________________________\nswish_23 (Swish) (None, 64, 64, 240) 0 batch_normalization_23[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_9 (DepthwiseCo (None, 32, 32, 240) 6000 swish_23[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_24 (BatchNo (None, 32, 32, 240) 960 depthwise_conv2d_9[0][0] \n__________________________________________________________________________________________________\nswish_24 (Swish) (None, 32, 32, 240) 0 batch_normalization_24[0][0] \n__________________________________________________________________________________________________\nlambda_9 (Lambda) (None, 1, 1, 240) 0 swish_24[0][0] \n__________________________________________________________________________________________________\nconv2d_32 (Conv2D) (None, 1, 1, 10) 2410 lambda_9[0][0] \n__________________________________________________________________________________________________\nswish_25 (Swish) (None, 1, 1, 10) 0 conv2d_32[0][0] \n__________________________________________________________________________________________________\nconv2d_33 (Conv2D) (None, 1, 1, 240) 2640 swish_25[0][0] \n__________________________________________________________________________________________________\nactivation_9 (Activation) (None, 1, 1, 240) 0 conv2d_33[0][0] \n__________________________________________________________________________________________________\nmultiply_9 (Multiply) (None, 32, 32, 240) 0 activation_9[0][0] \n swish_24[0][0] \n__________________________________________________________________________________________________\nconv2d_34 (Conv2D) (None, 32, 32, 64) 15360 multiply_9[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_25 (BatchNo (None, 32, 32, 64) 256 
conv2d_34[0][0] \n__________________________________________________________________________________________________\nconv2d_35 (Conv2D) (None, 32, 32, 384) 24576 batch_normalization_25[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_26 (BatchNo (None, 32, 32, 384) 1536 conv2d_35[0][0] \n__________________________________________________________________________________________________\nswish_26 (Swish) (None, 32, 32, 384) 0 batch_normalization_26[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_10 (DepthwiseC (None, 32, 32, 384) 9600 swish_26[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_27 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_10[0][0] \n__________________________________________________________________________________________________\nswish_27 (Swish) (None, 32, 32, 384) 0 batch_normalization_27[0][0] \n__________________________________________________________________________________________________\nlambda_10 (Lambda) (None, 1, 1, 384) 0 swish_27[0][0] \n__________________________________________________________________________________________________\nconv2d_36 (Conv2D) (None, 1, 1, 16) 6160 lambda_10[0][0] \n__________________________________________________________________________________________________\nswish_28 (Swish) (None, 1, 1, 16) 0 conv2d_36[0][0] \n__________________________________________________________________________________________________\nconv2d_37 (Conv2D) (None, 1, 1, 384) 6528 swish_28[0][0] \n__________________________________________________________________________________________________\nactivation_10 (Activation) (None, 1, 1, 384) 0 conv2d_37[0][0] \n__________________________________________________________________________________________________\nmultiply_10 (Multiply) (None, 32, 32, 
384) 0 activation_10[0][0] \n swish_27[0][0] \n__________________________________________________________________________________________________\nconv2d_38 (Conv2D) (None, 32, 32, 64) 24576 multiply_10[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_28 (BatchNo (None, 32, 32, 64) 256 conv2d_38[0][0] \n__________________________________________________________________________________________________\ndrop_connect_7 (DropConnect) (None, 32, 32, 64) 0 batch_normalization_28[0][0] \n__________________________________________________________________________________________________\nadd_7 (Add) (None, 32, 32, 64) 0 drop_connect_7[0][0] \n batch_normalization_25[0][0] \n__________________________________________________________________________________________________\nconv2d_39 (Conv2D) (None, 32, 32, 384) 24576 add_7[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_29 (BatchNo (None, 32, 32, 384) 1536 conv2d_39[0][0] \n__________________________________________________________________________________________________\nswish_29 (Swish) (None, 32, 32, 384) 0 batch_normalization_29[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_11 (DepthwiseC (None, 32, 32, 384) 9600 swish_29[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_30 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_11[0][0] \n__________________________________________________________________________________________________\nswish_30 (Swish) (None, 32, 32, 384) 0 batch_normalization_30[0][0] \n__________________________________________________________________________________________________\nlambda_11 (Lambda) (None, 1, 1, 384) 0 swish_30[0][0] 
\n__________________________________________________________________________________________________\nconv2d_40 (Conv2D) (None, 1, 1, 16) 6160 lambda_11[0][0] \n__________________________________________________________________________________________________\nswish_31 (Swish) (None, 1, 1, 16) 0 conv2d_40[0][0] \n__________________________________________________________________________________________________\nconv2d_41 (Conv2D) (None, 1, 1, 384) 6528 swish_31[0][0] \n__________________________________________________________________________________________________\nactivation_11 (Activation) (None, 1, 1, 384) 0 conv2d_41[0][0] \n__________________________________________________________________________________________________\nmultiply_11 (Multiply) (None, 32, 32, 384) 0 activation_11[0][0] \n swish_30[0][0] \n__________________________________________________________________________________________________\nconv2d_42 (Conv2D) (None, 32, 32, 64) 24576 multiply_11[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_31 (BatchNo (None, 32, 32, 64) 256 conv2d_42[0][0] \n__________________________________________________________________________________________________\ndrop_connect_8 (DropConnect) (None, 32, 32, 64) 0 batch_normalization_31[0][0] \n__________________________________________________________________________________________________\nadd_8 (Add) (None, 32, 32, 64) 0 drop_connect_8[0][0] \n add_7[0][0] \n__________________________________________________________________________________________________\nconv2d_43 (Conv2D) (None, 32, 32, 384) 24576 add_8[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_32 (BatchNo (None, 32, 32, 384) 1536 conv2d_43[0][0] \n__________________________________________________________________________________________________\nswish_32 (Swish) (None, 32, 32, 384) 0 
batch_normalization_32[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_12 (DepthwiseC (None, 32, 32, 384) 9600 swish_32[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_33 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_12[0][0] \n__________________________________________________________________________________________________\nswish_33 (Swish) (None, 32, 32, 384) 0 batch_normalization_33[0][0] \n__________________________________________________________________________________________________\nlambda_12 (Lambda) (None, 1, 1, 384) 0 swish_33[0][0] \n__________________________________________________________________________________________________\nconv2d_44 (Conv2D) (None, 1, 1, 16) 6160 lambda_12[0][0] \n__________________________________________________________________________________________________\nswish_34 (Swish) (None, 1, 1, 16) 0 conv2d_44[0][0] \n__________________________________________________________________________________________________\nconv2d_45 (Conv2D) (None, 1, 1, 384) 6528 swish_34[0][0] \n__________________________________________________________________________________________________\nactivation_12 (Activation) (None, 1, 1, 384) 0 conv2d_45[0][0] \n__________________________________________________________________________________________________\nmultiply_12 (Multiply) (None, 32, 32, 384) 0 activation_12[0][0] \n swish_33[0][0] \n__________________________________________________________________________________________________\nconv2d_46 (Conv2D) (None, 32, 32, 64) 24576 multiply_12[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_34 (BatchNo (None, 32, 32, 64) 256 conv2d_46[0][0] \n__________________________________________________________________________________________________\ndrop_connect_9 
(DropConnect) (None, 32, 32, 64) 0 batch_normalization_34[0][0] \n__________________________________________________________________________________________________\nadd_9 (Add) (None, 32, 32, 64) 0 drop_connect_9[0][0] \n add_8[0][0] \n__________________________________________________________________________________________________\nconv2d_47 (Conv2D) (None, 32, 32, 384) 24576 add_9[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_35 (BatchNo (None, 32, 32, 384) 1536 conv2d_47[0][0] \n__________________________________________________________________________________________________\nswish_35 (Swish) (None, 32, 32, 384) 0 batch_normalization_35[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_13 (DepthwiseC (None, 32, 32, 384) 9600 swish_35[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_36 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_13[0][0] \n__________________________________________________________________________________________________\nswish_36 (Swish) (None, 32, 32, 384) 0 batch_normalization_36[0][0] \n__________________________________________________________________________________________________\nlambda_13 (Lambda) (None, 1, 1, 384) 0 swish_36[0][0] \n__________________________________________________________________________________________________\nconv2d_48 (Conv2D) (None, 1, 1, 16) 6160 lambda_13[0][0] \n__________________________________________________________________________________________________\nswish_37 (Swish) (None, 1, 1, 16) 0 conv2d_48[0][0] \n__________________________________________________________________________________________________\nconv2d_49 (Conv2D) (None, 1, 1, 384) 6528 swish_37[0][0] 
\n__________________________________________________________________________________________________\nactivation_13 (Activation) (None, 1, 1, 384) 0 conv2d_49[0][0] \n__________________________________________________________________________________________________\nmultiply_13 (Multiply) (None, 32, 32, 384) 0 activation_13[0][0] \n swish_36[0][0] \n__________________________________________________________________________________________________\nconv2d_50 (Conv2D) (None, 32, 32, 64) 24576 multiply_13[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_37 (BatchNo (None, 32, 32, 64) 256 conv2d_50[0][0] \n__________________________________________________________________________________________________\ndrop_connect_10 (DropConnect) (None, 32, 32, 64) 0 batch_normalization_37[0][0] \n__________________________________________________________________________________________________\nadd_10 (Add) (None, 32, 32, 64) 0 drop_connect_10[0][0] \n add_9[0][0] \n__________________________________________________________________________________________________\nconv2d_51 (Conv2D) (None, 32, 32, 384) 24576 add_10[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_38 (BatchNo (None, 32, 32, 384) 1536 conv2d_51[0][0] \n__________________________________________________________________________________________________\nswish_38 (Swish) (None, 32, 32, 384) 0 batch_normalization_38[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_14 (DepthwiseC (None, 16, 16, 384) 3456 swish_38[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_39 (BatchNo (None, 16, 16, 384) 1536 depthwise_conv2d_14[0][0] 
\n__________________________________________________________________________________________________\nswish_39 (Swish) (None, 16, 16, 384) 0 batch_normalization_39[0][0] \n__________________________________________________________________________________________________\nlambda_14 (Lambda) (None, 1, 1, 384) 0 swish_39[0][0] \n__________________________________________________________________________________________________\nconv2d_52 (Conv2D) (None, 1, 1, 16) 6160 lambda_14[0][0] \n__________________________________________________________________________________________________\nswish_40 (Swish) (None, 1, 1, 16) 0 conv2d_52[0][0] \n__________________________________________________________________________________________________\nconv2d_53 (Conv2D) (None, 1, 1, 384) 6528 swish_40[0][0] \n__________________________________________________________________________________________________\nactivation_14 (Activation) (None, 1, 1, 384) 0 conv2d_53[0][0] \n__________________________________________________________________________________________________\nmultiply_14 (Multiply) (None, 16, 16, 384) 0 activation_14[0][0] \n swish_39[0][0] \n__________________________________________________________________________________________________\nconv2d_54 (Conv2D) (None, 16, 16, 128) 49152 multiply_14[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_40 (BatchNo (None, 16, 16, 128) 512 conv2d_54[0][0] \n__________________________________________________________________________________________________\nconv2d_55 (Conv2D) (None, 16, 16, 768) 98304 batch_normalization_40[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_41 (BatchNo (None, 16, 16, 768) 3072 conv2d_55[0][0] \n__________________________________________________________________________________________________\nswish_41 (Swish) (None, 16, 16, 768) 0 
batch_normalization_41[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_15 (DepthwiseC (None, 16, 16, 768) 6912 swish_41[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_42 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_15[0][0] \n__________________________________________________________________________________________________\nswish_42 (Swish) (None, 16, 16, 768) 0 batch_normalization_42[0][0] \n__________________________________________________________________________________________________\nlambda_15 (Lambda) (None, 1, 1, 768) 0 swish_42[0][0] \n__________________________________________________________________________________________________\nconv2d_56 (Conv2D) (None, 1, 1, 32) 24608 lambda_15[0][0] \n__________________________________________________________________________________________________\nswish_43 (Swish) (None, 1, 1, 32) 0 conv2d_56[0][0] \n__________________________________________________________________________________________________\nconv2d_57 (Conv2D) (None, 1, 1, 768) 25344 swish_43[0][0] \n__________________________________________________________________________________________________\nactivation_15 (Activation) (None, 1, 1, 768) 0 conv2d_57[0][0] \n__________________________________________________________________________________________________\nmultiply_15 (Multiply) (None, 16, 16, 768) 0 activation_15[0][0] \n swish_42[0][0] \n__________________________________________________________________________________________________\nconv2d_58 (Conv2D) (None, 16, 16, 128) 98304 multiply_15[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_43 (BatchNo (None, 16, 16, 128) 512 conv2d_58[0][0] \n__________________________________________________________________________________________________\ndrop_connect_11 
(DropConnect) (None, 16, 16, 128) 0 batch_normalization_43[0][0] \n__________________________________________________________________________________________________\nadd_11 (Add) (None, 16, 16, 128) 0 drop_connect_11[0][0] \n batch_normalization_40[0][0] \n__________________________________________________________________________________________________\nconv2d_59 (Conv2D) (None, 16, 16, 768) 98304 add_11[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_44 (BatchNo (None, 16, 16, 768) 3072 conv2d_59[0][0] \n__________________________________________________________________________________________________\nswish_44 (Swish) (None, 16, 16, 768) 0 batch_normalization_44[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_16 (DepthwiseC (None, 16, 16, 768) 6912 swish_44[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_45 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_16[0][0] \n__________________________________________________________________________________________________\nswish_45 (Swish) (None, 16, 16, 768) 0 batch_normalization_45[0][0] \n__________________________________________________________________________________________________\nlambda_16 (Lambda) (None, 1, 1, 768) 0 swish_45[0][0] \n__________________________________________________________________________________________________\nconv2d_60 (Conv2D) (None, 1, 1, 32) 24608 lambda_16[0][0] \n__________________________________________________________________________________________________\nswish_46 (Swish) (None, 1, 1, 32) 0 conv2d_60[0][0] \n__________________________________________________________________________________________________\nconv2d_61 (Conv2D) (None, 1, 1, 768) 25344 swish_46[0][0] 
\n__________________________________________________________________________________________________\nactivation_16 (Activation) (None, 1, 1, 768) 0 conv2d_61[0][0] \n__________________________________________________________________________________________________\nmultiply_16 (Multiply) (None, 16, 16, 768) 0 activation_16[0][0] \n swish_45[0][0] \n__________________________________________________________________________________________________\nconv2d_62 (Conv2D) (None, 16, 16, 128) 98304 multiply_16[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_46 (BatchNo (None, 16, 16, 128) 512 conv2d_62[0][0] \n__________________________________________________________________________________________________\ndrop_connect_12 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_46[0][0] \n__________________________________________________________________________________________________\nadd_12 (Add) (None, 16, 16, 128) 0 drop_connect_12[0][0] \n add_11[0][0] \n__________________________________________________________________________________________________\nconv2d_63 (Conv2D) (None, 16, 16, 768) 98304 add_12[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_47 (BatchNo (None, 16, 16, 768) 3072 conv2d_63[0][0] \n__________________________________________________________________________________________________\nswish_47 (Swish) (None, 16, 16, 768) 0 batch_normalization_47[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_17 (DepthwiseC (None, 16, 16, 768) 6912 swish_47[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_48 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_17[0][0] 
\n__________________________________________________________________________________________________\nswish_48 (Swish) (None, 16, 16, 768) 0 batch_normalization_48[0][0] \n__________________________________________________________________________________________________\nlambda_17 (Lambda) (None, 1, 1, 768) 0 swish_48[0][0] \n__________________________________________________________________________________________________\nconv2d_64 (Conv2D) (None, 1, 1, 32) 24608 lambda_17[0][0] \n__________________________________________________________________________________________________\nswish_49 (Swish) (None, 1, 1, 32) 0 conv2d_64[0][0] \n__________________________________________________________________________________________________\nconv2d_65 (Conv2D) (None, 1, 1, 768) 25344 swish_49[0][0] \n__________________________________________________________________________________________________\nactivation_17 (Activation) (None, 1, 1, 768) 0 conv2d_65[0][0] \n__________________________________________________________________________________________________\nmultiply_17 (Multiply) (None, 16, 16, 768) 0 activation_17[0][0] \n swish_48[0][0] \n__________________________________________________________________________________________________\nconv2d_66 (Conv2D) (None, 16, 16, 128) 98304 multiply_17[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_49 (BatchNo (None, 16, 16, 128) 512 conv2d_66[0][0] \n__________________________________________________________________________________________________\ndrop_connect_13 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_49[0][0] \n__________________________________________________________________________________________________\nadd_13 (Add) (None, 16, 16, 128) 0 drop_connect_13[0][0] \n add_12[0][0] \n__________________________________________________________________________________________________\nconv2d_67 (Conv2D) (None, 16, 16, 768) 98304 
add_13[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_50 (BatchNo (None, 16, 16, 768) 3072 conv2d_67[0][0] \n__________________________________________________________________________________________________\nswish_50 (Swish) (None, 16, 16, 768) 0 batch_normalization_50[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_18 (DepthwiseC (None, 16, 16, 768) 6912 swish_50[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_51 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_18[0][0] \n__________________________________________________________________________________________________\nswish_51 (Swish) (None, 16, 16, 768) 0 batch_normalization_51[0][0] \n__________________________________________________________________________________________________\nlambda_18 (Lambda) (None, 1, 1, 768) 0 swish_51[0][0] \n__________________________________________________________________________________________________\nconv2d_68 (Conv2D) (None, 1, 1, 32) 24608 lambda_18[0][0] \n__________________________________________________________________________________________________\nswish_52 (Swish) (None, 1, 1, 32) 0 conv2d_68[0][0] \n__________________________________________________________________________________________________\nconv2d_69 (Conv2D) (None, 1, 1, 768) 25344 swish_52[0][0] \n__________________________________________________________________________________________________\nactivation_18 (Activation) (None, 1, 1, 768) 0 conv2d_69[0][0] \n__________________________________________________________________________________________________\nmultiply_18 (Multiply) (None, 16, 16, 768) 0 activation_18[0][0] \n swish_51[0][0] \n__________________________________________________________________________________________________\nconv2d_70 (Conv2D) (None, 16, 
16, 128) 98304 multiply_18[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_52 (BatchNo (None, 16, 16, 128) 512 conv2d_70[0][0] \n__________________________________________________________________________________________________\ndrop_connect_14 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_52[0][0] \n__________________________________________________________________________________________________\nadd_14 (Add) (None, 16, 16, 128) 0 drop_connect_14[0][0] \n add_13[0][0] \n__________________________________________________________________________________________________\nconv2d_71 (Conv2D) (None, 16, 16, 768) 98304 add_14[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_53 (BatchNo (None, 16, 16, 768) 3072 conv2d_71[0][0] \n__________________________________________________________________________________________________\nswish_53 (Swish) (None, 16, 16, 768) 0 batch_normalization_53[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_19 (DepthwiseC (None, 16, 16, 768) 6912 swish_53[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_54 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_19[0][0] \n__________________________________________________________________________________________________\nswish_54 (Swish) (None, 16, 16, 768) 0 batch_normalization_54[0][0] \n__________________________________________________________________________________________________\nlambda_19 (Lambda) (None, 1, 1, 768) 0 swish_54[0][0] \n__________________________________________________________________________________________________\nconv2d_72 (Conv2D) (None, 1, 1, 32) 24608 lambda_19[0][0] 
\n__________________________________________________________________________________________________\nswish_55 (Swish) (None, 1, 1, 32) 0 conv2d_72[0][0] \n__________________________________________________________________________________________________\nconv2d_73 (Conv2D) (None, 1, 1, 768) 25344 swish_55[0][0] \n__________________________________________________________________________________________________\nactivation_19 (Activation) (None, 1, 1, 768) 0 conv2d_73[0][0] \n__________________________________________________________________________________________________\nmultiply_19 (Multiply) (None, 16, 16, 768) 0 activation_19[0][0] \n swish_54[0][0] \n__________________________________________________________________________________________________\nconv2d_74 (Conv2D) (None, 16, 16, 128) 98304 multiply_19[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_55 (BatchNo (None, 16, 16, 128) 512 conv2d_74[0][0] \n__________________________________________________________________________________________________\ndrop_connect_15 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_55[0][0] \n__________________________________________________________________________________________________\nadd_15 (Add) (None, 16, 16, 128) 0 drop_connect_15[0][0] \n add_14[0][0] \n__________________________________________________________________________________________________\nconv2d_75 (Conv2D) (None, 16, 16, 768) 98304 add_15[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_56 (BatchNo (None, 16, 16, 768) 3072 conv2d_75[0][0] \n__________________________________________________________________________________________________\nswish_56 (Swish) (None, 16, 16, 768) 0 batch_normalization_56[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_20 (DepthwiseC 
(None, 16, 16, 768) 6912 swish_56[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_57 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_20[0][0] \n__________________________________________________________________________________________________\nswish_57 (Swish) (None, 16, 16, 768) 0 batch_normalization_57[0][0] \n__________________________________________________________________________________________________\nlambda_20 (Lambda) (None, 1, 1, 768) 0 swish_57[0][0] \n__________________________________________________________________________________________________\nconv2d_76 (Conv2D) (None, 1, 1, 32) 24608 lambda_20[0][0] \n__________________________________________________________________________________________________\nswish_58 (Swish) (None, 1, 1, 32) 0 conv2d_76[0][0] \n__________________________________________________________________________________________________\nconv2d_77 (Conv2D) (None, 1, 1, 768) 25344 swish_58[0][0] \n__________________________________________________________________________________________________\nactivation_20 (Activation) (None, 1, 1, 768) 0 conv2d_77[0][0] \n__________________________________________________________________________________________________\nmultiply_20 (Multiply) (None, 16, 16, 768) 0 activation_20[0][0] \n swish_57[0][0] \n__________________________________________________________________________________________________\nconv2d_78 (Conv2D) (None, 16, 16, 128) 98304 multiply_20[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_58 (BatchNo (None, 16, 16, 128) 512 conv2d_78[0][0] \n__________________________________________________________________________________________________\ndrop_connect_16 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_58[0][0] 
\n__________________________________________________________________________________________________\nadd_16 (Add) (None, 16, 16, 128) 0 drop_connect_16[0][0] \n add_15[0][0] \n__________________________________________________________________________________________________\nconv2d_79 (Conv2D) (None, 16, 16, 768) 98304 add_16[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_59 (BatchNo (None, 16, 16, 768) 3072 conv2d_79[0][0] \n__________________________________________________________________________________________________\nswish_59 (Swish) (None, 16, 16, 768) 0 batch_normalization_59[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_21 (DepthwiseC (None, 16, 16, 768) 19200 swish_59[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_60 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_21[0][0] \n__________________________________________________________________________________________________\nswish_60 (Swish) (None, 16, 16, 768) 0 batch_normalization_60[0][0] \n__________________________________________________________________________________________________\nlambda_21 (Lambda) (None, 1, 1, 768) 0 swish_60[0][0] \n__________________________________________________________________________________________________\nconv2d_80 (Conv2D) (None, 1, 1, 32) 24608 lambda_21[0][0] \n__________________________________________________________________________________________________\nswish_61 (Swish) (None, 1, 1, 32) 0 conv2d_80[0][0] \n__________________________________________________________________________________________________\nconv2d_81 (Conv2D) (None, 1, 1, 768) 25344 swish_61[0][0] \n__________________________________________________________________________________________________\nactivation_21 (Activation) (None, 1, 1, 768) 0 
conv2d_81[0][0] \n__________________________________________________________________________________________________\nmultiply_21 (Multiply) (None, 16, 16, 768) 0 activation_21[0][0] \n swish_60[0][0] \n__________________________________________________________________________________________________\nconv2d_82 (Conv2D) (None, 16, 16, 176) 135168 multiply_21[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_61 (BatchNo (None, 16, 16, 176) 704 conv2d_82[0][0] \n__________________________________________________________________________________________________\nconv2d_83 (Conv2D) (None, 16, 16, 1056) 185856 batch_normalization_61[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_62 (BatchNo (None, 16, 16, 1056) 4224 conv2d_83[0][0] \n__________________________________________________________________________________________________\nswish_62 (Swish) (None, 16, 16, 1056) 0 batch_normalization_62[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_22 (DepthwiseC (None, 16, 16, 1056) 26400 swish_62[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_63 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_22[0][0] \n__________________________________________________________________________________________________\nswish_63 (Swish) (None, 16, 16, 1056) 0 batch_normalization_63[0][0] \n__________________________________________________________________________________________________\nlambda_22 (Lambda) (None, 1, 1, 1056) 0 swish_63[0][0] \n__________________________________________________________________________________________________\nconv2d_84 (Conv2D) (None, 1, 1, 44) 46508 lambda_22[0][0] 
\n__________________________________________________________________________________________________\nswish_64 (Swish) (None, 1, 1, 44) 0 conv2d_84[0][0] \n__________________________________________________________________________________________________\nconv2d_85 (Conv2D) (None, 1, 1, 1056) 47520 swish_64[0][0] \n__________________________________________________________________________________________________\nactivation_22 (Activation) (None, 1, 1, 1056) 0 conv2d_85[0][0] \n__________________________________________________________________________________________________\nmultiply_22 (Multiply) (None, 16, 16, 1056) 0 activation_22[0][0] \n swish_63[0][0] \n__________________________________________________________________________________________________\nconv2d_86 (Conv2D) (None, 16, 16, 176) 185856 multiply_22[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_64 (BatchNo (None, 16, 16, 176) 704 conv2d_86[0][0] \n__________________________________________________________________________________________________\ndrop_connect_17 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_64[0][0] \n__________________________________________________________________________________________________\nadd_17 (Add) (None, 16, 16, 176) 0 drop_connect_17[0][0] \n batch_normalization_61[0][0] \n__________________________________________________________________________________________________\nconv2d_87 (Conv2D) (None, 16, 16, 1056) 185856 add_17[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_65 (BatchNo (None, 16, 16, 1056) 4224 conv2d_87[0][0] \n__________________________________________________________________________________________________\nswish_65 (Swish) (None, 16, 16, 1056) 0 batch_normalization_65[0][0] 
\n__________________________________________________________________________________________________\ndepthwise_conv2d_23 (DepthwiseC (None, 16, 16, 1056) 26400 swish_65[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_66 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_23[0][0] \n__________________________________________________________________________________________________\nswish_66 (Swish) (None, 16, 16, 1056) 0 batch_normalization_66[0][0] \n__________________________________________________________________________________________________\nlambda_23 (Lambda) (None, 1, 1, 1056) 0 swish_66[0][0] \n__________________________________________________________________________________________________\nconv2d_88 (Conv2D) (None, 1, 1, 44) 46508 lambda_23[0][0] \n__________________________________________________________________________________________________\nswish_67 (Swish) (None, 1, 1, 44) 0 conv2d_88[0][0] \n__________________________________________________________________________________________________\nconv2d_89 (Conv2D) (None, 1, 1, 1056) 47520 swish_67[0][0] \n__________________________________________________________________________________________________\nactivation_23 (Activation) (None, 1, 1, 1056) 0 conv2d_89[0][0] \n__________________________________________________________________________________________________\nmultiply_23 (Multiply) (None, 16, 16, 1056) 0 activation_23[0][0] \n swish_66[0][0] \n__________________________________________________________________________________________________\nconv2d_90 (Conv2D) (None, 16, 16, 176) 185856 multiply_23[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_67 (BatchNo (None, 16, 16, 176) 704 conv2d_90[0][0] \n__________________________________________________________________________________________________\ndrop_connect_18 (DropConnect) (None, 16, 
16, 176) 0 batch_normalization_67[0][0] \n__________________________________________________________________________________________________\nadd_18 (Add) (None, 16, 16, 176) 0 drop_connect_18[0][0] \n add_17[0][0] \n__________________________________________________________________________________________________\nconv2d_91 (Conv2D) (None, 16, 16, 1056) 185856 add_18[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_68 (BatchNo (None, 16, 16, 1056) 4224 conv2d_91[0][0] \n__________________________________________________________________________________________________\nswish_68 (Swish) (None, 16, 16, 1056) 0 batch_normalization_68[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_24 (DepthwiseC (None, 16, 16, 1056) 26400 swish_68[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_69 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_24[0][0] \n__________________________________________________________________________________________________\nswish_69 (Swish) (None, 16, 16, 1056) 0 batch_normalization_69[0][0] \n__________________________________________________________________________________________________\nlambda_24 (Lambda) (None, 1, 1, 1056) 0 swish_69[0][0] \n__________________________________________________________________________________________________\nconv2d_92 (Conv2D) (None, 1, 1, 44) 46508 lambda_24[0][0] \n__________________________________________________________________________________________________\nswish_70 (Swish) (None, 1, 1, 44) 0 conv2d_92[0][0] \n__________________________________________________________________________________________________\nconv2d_93 (Conv2D) (None, 1, 1, 1056) 47520 swish_70[0][0] 
\n__________________________________________________________________________________________________\nactivation_24 (Activation) (None, 1, 1, 1056) 0 conv2d_93[0][0] \n__________________________________________________________________________________________________\nmultiply_24 (Multiply) (None, 16, 16, 1056) 0 activation_24[0][0] \n swish_69[0][0] \n__________________________________________________________________________________________________\nconv2d_94 (Conv2D) (None, 16, 16, 176) 185856 multiply_24[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_70 (BatchNo (None, 16, 16, 176) 704 conv2d_94[0][0] \n__________________________________________________________________________________________________\ndrop_connect_19 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_70[0][0] \n__________________________________________________________________________________________________\nadd_19 (Add) (None, 16, 16, 176) 0 drop_connect_19[0][0] \n add_18[0][0] \n__________________________________________________________________________________________________\nconv2d_95 (Conv2D) (None, 16, 16, 1056) 185856 add_19[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_71 (BatchNo (None, 16, 16, 1056) 4224 conv2d_95[0][0] \n__________________________________________________________________________________________________\nswish_71 (Swish) (None, 16, 16, 1056) 0 batch_normalization_71[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_25 (DepthwiseC (None, 16, 16, 1056) 26400 swish_71[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_72 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_25[0][0] 
\n__________________________________________________________________________________________________\nswish_72 (Swish) (None, 16, 16, 1056) 0 batch_normalization_72[0][0] \n__________________________________________________________________________________________________\nlambda_25 (Lambda) (None, 1, 1, 1056) 0 swish_72[0][0] \n__________________________________________________________________________________________________\nconv2d_96 (Conv2D) (None, 1, 1, 44) 46508 lambda_25[0][0] \n__________________________________________________________________________________________________\nswish_73 (Swish) (None, 1, 1, 44) 0 conv2d_96[0][0] \n__________________________________________________________________________________________________\nconv2d_97 (Conv2D) (None, 1, 1, 1056) 47520 swish_73[0][0] \n__________________________________________________________________________________________________\nactivation_25 (Activation) (None, 1, 1, 1056) 0 conv2d_97[0][0] \n__________________________________________________________________________________________________\nmultiply_25 (Multiply) (None, 16, 16, 1056) 0 activation_25[0][0] \n swish_72[0][0] \n__________________________________________________________________________________________________\nconv2d_98 (Conv2D) (None, 16, 16, 176) 185856 multiply_25[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_73 (BatchNo (None, 16, 16, 176) 704 conv2d_98[0][0] \n__________________________________________________________________________________________________\ndrop_connect_20 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_73[0][0] \n__________________________________________________________________________________________________\nadd_20 (Add) (None, 16, 16, 176) 0 drop_connect_20[0][0] \n add_19[0][0] \n__________________________________________________________________________________________________\nconv2d_99 (Conv2D) (None, 16, 16, 1056) 
185856 add_20[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_74 (BatchNo (None, 16, 16, 1056) 4224 conv2d_99[0][0] \n__________________________________________________________________________________________________\nswish_74 (Swish) (None, 16, 16, 1056) 0 batch_normalization_74[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_26 (DepthwiseC (None, 16, 16, 1056) 26400 swish_74[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_75 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_26[0][0] \n__________________________________________________________________________________________________\nswish_75 (Swish) (None, 16, 16, 1056) 0 batch_normalization_75[0][0] \n__________________________________________________________________________________________________\nlambda_26 (Lambda) (None, 1, 1, 1056) 0 swish_75[0][0] \n__________________________________________________________________________________________________\nconv2d_100 (Conv2D) (None, 1, 1, 44) 46508 lambda_26[0][0] \n__________________________________________________________________________________________________\nswish_76 (Swish) (None, 1, 1, 44) 0 conv2d_100[0][0] \n__________________________________________________________________________________________________\nconv2d_101 (Conv2D) (None, 1, 1, 1056) 47520 swish_76[0][0] \n__________________________________________________________________________________________________\nactivation_26 (Activation) (None, 1, 1, 1056) 0 conv2d_101[0][0] \n__________________________________________________________________________________________________\nmultiply_26 (Multiply) (None, 16, 16, 1056) 0 activation_26[0][0] \n swish_75[0][0] 
\n__________________________________________________________________________________________________\nconv2d_102 (Conv2D) (None, 16, 16, 176) 185856 multiply_26[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_76 (BatchNo (None, 16, 16, 176) 704 conv2d_102[0][0] \n__________________________________________________________________________________________________\ndrop_connect_21 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_76[0][0] \n__________________________________________________________________________________________________\nadd_21 (Add) (None, 16, 16, 176) 0 drop_connect_21[0][0] \n add_20[0][0] \n__________________________________________________________________________________________________\nconv2d_103 (Conv2D) (None, 16, 16, 1056) 185856 add_21[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_77 (BatchNo (None, 16, 16, 1056) 4224 conv2d_103[0][0] \n__________________________________________________________________________________________________\nswish_77 (Swish) (None, 16, 16, 1056) 0 batch_normalization_77[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_27 (DepthwiseC (None, 16, 16, 1056) 26400 swish_77[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_78 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_27[0][0] \n__________________________________________________________________________________________________\nswish_78 (Swish) (None, 16, 16, 1056) 0 batch_normalization_78[0][0] \n__________________________________________________________________________________________________\nlambda_27 (Lambda) (None, 1, 1, 1056) 0 swish_78[0][0] 
\n__________________________________________________________________________________________________\nconv2d_104 (Conv2D) (None, 1, 1, 44) 46508 lambda_27[0][0] \n__________________________________________________________________________________________________\nswish_79 (Swish) (None, 1, 1, 44) 0 conv2d_104[0][0] \n__________________________________________________________________________________________________\nconv2d_105 (Conv2D) (None, 1, 1, 1056) 47520 swish_79[0][0] \n__________________________________________________________________________________________________\nactivation_27 (Activation) (None, 1, 1, 1056) 0 conv2d_105[0][0] \n__________________________________________________________________________________________________\nmultiply_27 (Multiply) (None, 16, 16, 1056) 0 activation_27[0][0] \n swish_78[0][0] \n__________________________________________________________________________________________________\nconv2d_106 (Conv2D) (None, 16, 16, 176) 185856 multiply_27[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_79 (BatchNo (None, 16, 16, 176) 704 conv2d_106[0][0] \n__________________________________________________________________________________________________\ndrop_connect_22 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_79[0][0] \n__________________________________________________________________________________________________\nadd_22 (Add) (None, 16, 16, 176) 0 drop_connect_22[0][0] \n add_21[0][0] \n__________________________________________________________________________________________________\nconv2d_107 (Conv2D) (None, 16, 16, 1056) 185856 add_22[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_80 (BatchNo (None, 16, 16, 1056) 4224 conv2d_107[0][0] \n__________________________________________________________________________________________________\nswish_80 (Swish) (None, 
16, 16, 1056) 0 batch_normalization_80[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_28 (DepthwiseC (None, 8, 8, 1056) 26400 swish_80[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_81 (BatchNo (None, 8, 8, 1056) 4224 depthwise_conv2d_28[0][0] \n__________________________________________________________________________________________________\nswish_81 (Swish) (None, 8, 8, 1056) 0 batch_normalization_81[0][0] \n__________________________________________________________________________________________________\nlambda_28 (Lambda) (None, 1, 1, 1056) 0 swish_81[0][0] \n__________________________________________________________________________________________________\nconv2d_108 (Conv2D) (None, 1, 1, 44) 46508 lambda_28[0][0] \n__________________________________________________________________________________________________\nswish_82 (Swish) (None, 1, 1, 44) 0 conv2d_108[0][0] \n__________________________________________________________________________________________________\nconv2d_109 (Conv2D) (None, 1, 1, 1056) 47520 swish_82[0][0] \n__________________________________________________________________________________________________\nactivation_28 (Activation) (None, 1, 1, 1056) 0 conv2d_109[0][0] \n__________________________________________________________________________________________________\nmultiply_28 (Multiply) (None, 8, 8, 1056) 0 activation_28[0][0] \n swish_81[0][0] \n__________________________________________________________________________________________________\nconv2d_110 (Conv2D) (None, 8, 8, 304) 321024 multiply_28[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_82 (BatchNo (None, 8, 8, 304) 1216 conv2d_110[0][0] 
\n__________________________________________________________________________________________________\nconv2d_111 (Conv2D) (None, 8, 8, 1824) 554496 batch_normalization_82[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_83 (BatchNo (None, 8, 8, 1824) 7296 conv2d_111[0][0] \n__________________________________________________________________________________________________\nswish_83 (Swish) (None, 8, 8, 1824) 0 batch_normalization_83[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_29 (DepthwiseC (None, 8, 8, 1824) 45600 swish_83[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_84 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_29[0][0] \n__________________________________________________________________________________________________\nswish_84 (Swish) (None, 8, 8, 1824) 0 batch_normalization_84[0][0] \n__________________________________________________________________________________________________\nlambda_29 (Lambda) (None, 1, 1, 1824) 0 swish_84[0][0] \n__________________________________________________________________________________________________\nconv2d_112 (Conv2D) (None, 1, 1, 76) 138700 lambda_29[0][0] \n__________________________________________________________________________________________________\nswish_85 (Swish) (None, 1, 1, 76) 0 conv2d_112[0][0] \n__________________________________________________________________________________________________\nconv2d_113 (Conv2D) (None, 1, 1, 1824) 140448 swish_85[0][0] \n__________________________________________________________________________________________________\nactivation_29 (Activation) (None, 1, 1, 1824) 0 conv2d_113[0][0] \n__________________________________________________________________________________________________\nmultiply_29 (Multiply) (None, 8, 8, 1824) 0 
activation_29[0][0] \n swish_84[0][0] \n__________________________________________________________________________________________________\nconv2d_114 (Conv2D) (None, 8, 8, 304) 554496 multiply_29[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_85 (BatchNo (None, 8, 8, 304) 1216 conv2d_114[0][0] \n__________________________________________________________________________________________________\ndrop_connect_23 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_85[0][0] \n__________________________________________________________________________________________________\nadd_23 (Add) (None, 8, 8, 304) 0 drop_connect_23[0][0] \n batch_normalization_82[0][0] \n__________________________________________________________________________________________________\nconv2d_115 (Conv2D) (None, 8, 8, 1824) 554496 add_23[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_86 (BatchNo (None, 8, 8, 1824) 7296 conv2d_115[0][0] \n__________________________________________________________________________________________________\nswish_86 (Swish) (None, 8, 8, 1824) 0 batch_normalization_86[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_30 (DepthwiseC (None, 8, 8, 1824) 45600 swish_86[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_87 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_30[0][0] \n__________________________________________________________________________________________________\nswish_87 (Swish) (None, 8, 8, 1824) 0 batch_normalization_87[0][0] \n__________________________________________________________________________________________________\nlambda_30 (Lambda) (None, 1, 1, 1824) 0 swish_87[0][0] 
\n__________________________________________________________________________________________________\nconv2d_116 (Conv2D) (None, 1, 1, 76) 138700 lambda_30[0][0] \n__________________________________________________________________________________________________\nswish_88 (Swish) (None, 1, 1, 76) 0 conv2d_116[0][0] \n__________________________________________________________________________________________________\nconv2d_117 (Conv2D) (None, 1, 1, 1824) 140448 swish_88[0][0] \n__________________________________________________________________________________________________\nactivation_30 (Activation) (None, 1, 1, 1824) 0 conv2d_117[0][0] \n__________________________________________________________________________________________________\nmultiply_30 (Multiply) (None, 8, 8, 1824) 0 activation_30[0][0] \n swish_87[0][0] \n__________________________________________________________________________________________________\nconv2d_118 (Conv2D) (None, 8, 8, 304) 554496 multiply_30[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_88 (BatchNo (None, 8, 8, 304) 1216 conv2d_118[0][0] \n__________________________________________________________________________________________________\ndrop_connect_24 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_88[0][0] \n__________________________________________________________________________________________________\nadd_24 (Add) (None, 8, 8, 304) 0 drop_connect_24[0][0] \n add_23[0][0] \n__________________________________________________________________________________________________\nconv2d_119 (Conv2D) (None, 8, 8, 1824) 554496 add_24[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_89 (BatchNo (None, 8, 8, 1824) 7296 conv2d_119[0][0] \n__________________________________________________________________________________________________\nswish_89 (Swish) (None, 8, 8, 1824) 0 
batch_normalization_89[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_31 (DepthwiseC (None, 8, 8, 1824) 45600 swish_89[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_90 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_31[0][0] \n__________________________________________________________________________________________________\nswish_90 (Swish) (None, 8, 8, 1824) 0 batch_normalization_90[0][0] \n__________________________________________________________________________________________________\nlambda_31 (Lambda) (None, 1, 1, 1824) 0 swish_90[0][0] \n__________________________________________________________________________________________________\nconv2d_120 (Conv2D) (None, 1, 1, 76) 138700 lambda_31[0][0] \n__________________________________________________________________________________________________\nswish_91 (Swish) (None, 1, 1, 76) 0 conv2d_120[0][0] \n__________________________________________________________________________________________________\nconv2d_121 (Conv2D) (None, 1, 1, 1824) 140448 swish_91[0][0] \n__________________________________________________________________________________________________\nactivation_31 (Activation) (None, 1, 1, 1824) 0 conv2d_121[0][0] \n__________________________________________________________________________________________________\nmultiply_31 (Multiply) (None, 8, 8, 1824) 0 activation_31[0][0] \n swish_90[0][0] \n__________________________________________________________________________________________________\nconv2d_122 (Conv2D) (None, 8, 8, 304) 554496 multiply_31[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_91 (BatchNo (None, 8, 8, 304) 1216 conv2d_122[0][0] 
\n__________________________________________________________________________________________________\ndrop_connect_25 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_91[0][0] \n__________________________________________________________________________________________________\nadd_25 (Add) (None, 8, 8, 304) 0 drop_connect_25[0][0] \n add_24[0][0] \n__________________________________________________________________________________________________\nconv2d_123 (Conv2D) (None, 8, 8, 1824) 554496 add_25[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_92 (BatchNo (None, 8, 8, 1824) 7296 conv2d_123[0][0] \n__________________________________________________________________________________________________\nswish_92 (Swish) (None, 8, 8, 1824) 0 batch_normalization_92[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_32 (DepthwiseC (None, 8, 8, 1824) 45600 swish_92[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_93 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_32[0][0] \n__________________________________________________________________________________________________\nswish_93 (Swish) (None, 8, 8, 1824) 0 batch_normalization_93[0][0] \n__________________________________________________________________________________________________\nlambda_32 (Lambda) (None, 1, 1, 1824) 0 swish_93[0][0] \n__________________________________________________________________________________________________\nconv2d_124 (Conv2D) (None, 1, 1, 76) 138700 lambda_32[0][0] \n__________________________________________________________________________________________________\nswish_94 (Swish) (None, 1, 1, 76) 0 conv2d_124[0][0] \n__________________________________________________________________________________________________\nconv2d_125 (Conv2D) (None, 1, 1, 
1824) 140448 swish_94[0][0] \n__________________________________________________________________________________________________\nactivation_32 (Activation) (None, 1, 1, 1824) 0 conv2d_125[0][0] \n__________________________________________________________________________________________________\nmultiply_32 (Multiply) (None, 8, 8, 1824) 0 activation_32[0][0] \n swish_93[0][0] \n__________________________________________________________________________________________________\nconv2d_126 (Conv2D) (None, 8, 8, 304) 554496 multiply_32[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_94 (BatchNo (None, 8, 8, 304) 1216 conv2d_126[0][0] \n__________________________________________________________________________________________________\ndrop_connect_26 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_94[0][0] \n__________________________________________________________________________________________________\nadd_26 (Add) (None, 8, 8, 304) 0 drop_connect_26[0][0] \n add_25[0][0] \n__________________________________________________________________________________________________\nconv2d_127 (Conv2D) (None, 8, 8, 1824) 554496 add_26[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_95 (BatchNo (None, 8, 8, 1824) 7296 conv2d_127[0][0] \n__________________________________________________________________________________________________\nswish_95 (Swish) (None, 8, 8, 1824) 0 batch_normalization_95[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_33 (DepthwiseC (None, 8, 8, 1824) 45600 swish_95[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_96 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_33[0][0] 
\n__________________________________________________________________________________________________\nswish_96 (Swish) (None, 8, 8, 1824) 0 batch_normalization_96[0][0] \n__________________________________________________________________________________________________\nlambda_33 (Lambda) (None, 1, 1, 1824) 0 swish_96[0][0] \n__________________________________________________________________________________________________\nconv2d_128 (Conv2D) (None, 1, 1, 76) 138700 lambda_33[0][0] \n__________________________________________________________________________________________________\nswish_97 (Swish) (None, 1, 1, 76) 0 conv2d_128[0][0] \n__________________________________________________________________________________________________\nconv2d_129 (Conv2D) (None, 1, 1, 1824) 140448 swish_97[0][0] \n__________________________________________________________________________________________________\nactivation_33 (Activation) (None, 1, 1, 1824) 0 conv2d_129[0][0] \n__________________________________________________________________________________________________\nmultiply_33 (Multiply) (None, 8, 8, 1824) 0 activation_33[0][0] \n swish_96[0][0] \n__________________________________________________________________________________________________\nconv2d_130 (Conv2D) (None, 8, 8, 304) 554496 multiply_33[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_97 (BatchNo (None, 8, 8, 304) 1216 conv2d_130[0][0] \n__________________________________________________________________________________________________\ndrop_connect_27 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_97[0][0] \n__________________________________________________________________________________________________\nadd_27 (Add) (None, 8, 8, 304) 0 drop_connect_27[0][0] \n add_26[0][0] \n__________________________________________________________________________________________________\nconv2d_131 (Conv2D) (None, 8, 8, 1824) 554496 
add_27[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_98 (BatchNo (None, 8, 8, 1824) 7296 conv2d_131[0][0] \n__________________________________________________________________________________________________\nswish_98 (Swish) (None, 8, 8, 1824) 0 batch_normalization_98[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_34 (DepthwiseC (None, 8, 8, 1824) 45600 swish_98[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_99 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_34[0][0] \n__________________________________________________________________________________________________\nswish_99 (Swish) (None, 8, 8, 1824) 0 batch_normalization_99[0][0] \n__________________________________________________________________________________________________\nlambda_34 (Lambda) (None, 1, 1, 1824) 0 swish_99[0][0] \n__________________________________________________________________________________________________\nconv2d_132 (Conv2D) (None, 1, 1, 76) 138700 lambda_34[0][0] \n__________________________________________________________________________________________________\nswish_100 (Swish) (None, 1, 1, 76) 0 conv2d_132[0][0] \n__________________________________________________________________________________________________\nconv2d_133 (Conv2D) (None, 1, 1, 1824) 140448 swish_100[0][0] \n__________________________________________________________________________________________________\nactivation_34 (Activation) (None, 1, 1, 1824) 0 conv2d_133[0][0] \n__________________________________________________________________________________________________\nmultiply_34 (Multiply) (None, 8, 8, 1824) 0 activation_34[0][0] \n swish_99[0][0] \n__________________________________________________________________________________________________\nconv2d_134 (Conv2D) 
(None, 8, 8, 304) 554496 multiply_34[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_100 (BatchN (None, 8, 8, 304) 1216 conv2d_134[0][0] \n__________________________________________________________________________________________________\ndrop_connect_28 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_100[0][0] \n__________________________________________________________________________________________________\nadd_28 (Add) (None, 8, 8, 304) 0 drop_connect_28[0][0] \n add_27[0][0] \n__________________________________________________________________________________________________\nconv2d_135 (Conv2D) (None, 8, 8, 1824) 554496 add_28[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_101 (BatchN (None, 8, 8, 1824) 7296 conv2d_135[0][0] \n__________________________________________________________________________________________________\nswish_101 (Swish) (None, 8, 8, 1824) 0 batch_normalization_101[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_35 (DepthwiseC (None, 8, 8, 1824) 45600 swish_101[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_102 (BatchN (None, 8, 8, 1824) 7296 depthwise_conv2d_35[0][0] \n__________________________________________________________________________________________________\nswish_102 (Swish) (None, 8, 8, 1824) 0 batch_normalization_102[0][0] \n__________________________________________________________________________________________________\nlambda_35 (Lambda) (None, 1, 1, 1824) 0 swish_102[0][0] \n__________________________________________________________________________________________________\nconv2d_136 (Conv2D) (None, 1, 1, 76) 138700 lambda_35[0][0] 
\n__________________________________________________________________________________________________\nswish_103 (Swish) (None, 1, 1, 76) 0 conv2d_136[0][0] \n__________________________________________________________________________________________________\nconv2d_137 (Conv2D) (None, 1, 1, 1824) 140448 swish_103[0][0] \n__________________________________________________________________________________________________\nactivation_35 (Activation) (None, 1, 1, 1824) 0 conv2d_137[0][0] \n__________________________________________________________________________________________________\nmultiply_35 (Multiply) (None, 8, 8, 1824) 0 activation_35[0][0] \n swish_102[0][0] \n__________________________________________________________________________________________________\nconv2d_138 (Conv2D) (None, 8, 8, 304) 554496 multiply_35[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_103 (BatchN (None, 8, 8, 304) 1216 conv2d_138[0][0] \n__________________________________________________________________________________________________\ndrop_connect_29 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_103[0][0] \n__________________________________________________________________________________________________\nadd_29 (Add) (None, 8, 8, 304) 0 drop_connect_29[0][0] \n add_28[0][0] \n__________________________________________________________________________________________________\nconv2d_139 (Conv2D) (None, 8, 8, 1824) 554496 add_29[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_104 (BatchN (None, 8, 8, 1824) 7296 conv2d_139[0][0] \n__________________________________________________________________________________________________\nswish_104 (Swish) (None, 8, 8, 1824) 0 batch_normalization_104[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_36 
(DepthwiseC (None, 8, 8, 1824) 45600 swish_104[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_105 (BatchN (None, 8, 8, 1824) 7296 depthwise_conv2d_36[0][0] \n__________________________________________________________________________________________________\nswish_105 (Swish) (None, 8, 8, 1824) 0 batch_normalization_105[0][0] \n__________________________________________________________________________________________________\nlambda_36 (Lambda) (None, 1, 1, 1824) 0 swish_105[0][0] \n__________________________________________________________________________________________________\nconv2d_140 (Conv2D) (None, 1, 1, 76) 138700 lambda_36[0][0] \n__________________________________________________________________________________________________\nswish_106 (Swish) (None, 1, 1, 76) 0 conv2d_140[0][0] \n__________________________________________________________________________________________________\nconv2d_141 (Conv2D) (None, 1, 1, 1824) 140448 swish_106[0][0] \n__________________________________________________________________________________________________\nactivation_36 (Activation) (None, 1, 1, 1824) 0 conv2d_141[0][0] \n__________________________________________________________________________________________________\nmultiply_36 (Multiply) (None, 8, 8, 1824) 0 activation_36[0][0] \n swish_105[0][0] \n__________________________________________________________________________________________________\nconv2d_142 (Conv2D) (None, 8, 8, 304) 554496 multiply_36[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_106 (BatchN (None, 8, 8, 304) 1216 conv2d_142[0][0] \n__________________________________________________________________________________________________\ndrop_connect_30 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_106[0][0] 
\n__________________________________________________________________________________________________\nadd_30 (Add) (None, 8, 8, 304) 0 drop_connect_30[0][0] \n add_29[0][0] \n__________________________________________________________________________________________________\nconv2d_143 (Conv2D) (None, 8, 8, 1824) 554496 add_30[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_107 (BatchN (None, 8, 8, 1824) 7296 conv2d_143[0][0] \n__________________________________________________________________________________________________\nswish_107 (Swish) (None, 8, 8, 1824) 0 batch_normalization_107[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_37 (DepthwiseC (None, 8, 8, 1824) 16416 swish_107[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_108 (BatchN (None, 8, 8, 1824) 7296 depthwise_conv2d_37[0][0] \n__________________________________________________________________________________________________\nswish_108 (Swish) (None, 8, 8, 1824) 0 batch_normalization_108[0][0] \n__________________________________________________________________________________________________\nlambda_37 (Lambda) (None, 1, 1, 1824) 0 swish_108[0][0] \n__________________________________________________________________________________________________\nconv2d_144 (Conv2D) (None, 1, 1, 76) 138700 lambda_37[0][0] \n__________________________________________________________________________________________________\nswish_109 (Swish) (None, 1, 1, 76) 0 conv2d_144[0][0] \n__________________________________________________________________________________________________\nconv2d_145 (Conv2D) (None, 1, 1, 1824) 140448 swish_109[0][0] \n__________________________________________________________________________________________________\nactivation_37 (Activation) (None, 1, 1, 1824) 0 
conv2d_145[0][0] \n__________________________________________________________________________________________________\nmultiply_37 (Multiply) (None, 8, 8, 1824) 0 activation_37[0][0] \n swish_108[0][0] \n__________________________________________________________________________________________________\nconv2d_146 (Conv2D) (None, 8, 8, 512) 933888 multiply_37[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_109 (BatchN (None, 8, 8, 512) 2048 conv2d_146[0][0] \n__________________________________________________________________________________________________\nconv2d_147 (Conv2D) (None, 8, 8, 3072) 1572864 batch_normalization_109[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_110 (BatchN (None, 8, 8, 3072) 12288 conv2d_147[0][0] \n__________________________________________________________________________________________________\nswish_110 (Swish) (None, 8, 8, 3072) 0 batch_normalization_110[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_38 (DepthwiseC (None, 8, 8, 3072) 27648 swish_110[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_111 (BatchN (None, 8, 8, 3072) 12288 depthwise_conv2d_38[0][0] \n__________________________________________________________________________________________________\nswish_111 (Swish) (None, 8, 8, 3072) 0 batch_normalization_111[0][0] \n__________________________________________________________________________________________________\nlambda_38 (Lambda) (None, 1, 1, 3072) 0 swish_111[0][0] \n__________________________________________________________________________________________________\nconv2d_148 (Conv2D) (None, 1, 1, 128) 393344 lambda_38[0][0] 
\n__________________________________________________________________________________________________\nswish_112 (Swish) (None, 1, 1, 128) 0 conv2d_148[0][0] \n__________________________________________________________________________________________________\nconv2d_149 (Conv2D) (None, 1, 1, 3072) 396288 swish_112[0][0] \n__________________________________________________________________________________________________\nactivation_38 (Activation) (None, 1, 1, 3072) 0 conv2d_149[0][0] \n__________________________________________________________________________________________________\nmultiply_38 (Multiply) (None, 8, 8, 3072) 0 activation_38[0][0] \n swish_111[0][0] \n__________________________________________________________________________________________________\nconv2d_150 (Conv2D) (None, 8, 8, 512) 1572864 multiply_38[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_112 (BatchN (None, 8, 8, 512) 2048 conv2d_150[0][0] \n__________________________________________________________________________________________________\ndrop_connect_31 (DropConnect) (None, 8, 8, 512) 0 batch_normalization_112[0][0] \n__________________________________________________________________________________________________\nadd_31 (Add) (None, 8, 8, 512) 0 drop_connect_31[0][0] \n batch_normalization_109[0][0] \n__________________________________________________________________________________________________\nconv2d_151 (Conv2D) (None, 8, 8, 3072) 1572864 add_31[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_113 (BatchN (None, 8, 8, 3072) 12288 conv2d_151[0][0] \n__________________________________________________________________________________________________\nswish_113 (Swish) (None, 8, 8, 3072) 0 batch_normalization_113[0][0] 
\n__________________________________________________________________________________________________\ndepthwise_conv2d_39 (DepthwiseC (None, 8, 8, 3072) 27648 swish_113[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_114 (BatchN (None, 8, 8, 3072) 12288 depthwise_conv2d_39[0][0] \n__________________________________________________________________________________________________\nswish_114 (Swish) (None, 8, 8, 3072) 0 batch_normalization_114[0][0] \n__________________________________________________________________________________________________\nlambda_39 (Lambda) (None, 1, 1, 3072) 0 swish_114[0][0] \n__________________________________________________________________________________________________\nconv2d_152 (Conv2D) (None, 1, 1, 128) 393344 lambda_39[0][0] \n__________________________________________________________________________________________________\nswish_115 (Swish) (None, 1, 1, 128) 0 conv2d_152[0][0] \n__________________________________________________________________________________________________\nconv2d_153 (Conv2D) (None, 1, 1, 3072) 396288 swish_115[0][0] \n__________________________________________________________________________________________________\nactivation_39 (Activation) (None, 1, 1, 3072) 0 conv2d_153[0][0] \n__________________________________________________________________________________________________\nmultiply_39 (Multiply) (None, 8, 8, 3072) 0 activation_39[0][0] \n swish_114[0][0] \n__________________________________________________________________________________________________\nconv2d_154 (Conv2D) (None, 8, 8, 512) 1572864 multiply_39[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_115 (BatchN (None, 8, 8, 512) 2048 conv2d_154[0][0] \n__________________________________________________________________________________________________\ndrop_connect_32 (DropConnect) 
(None, 8, 8, 512) 0 batch_normalization_115[0][0] \n__________________________________________________________________________________________________\nadd_32 (Add) (None, 8, 8, 512) 0 drop_connect_32[0][0] \n add_31[0][0] \n__________________________________________________________________________________________________\nconv2d_155 (Conv2D) (None, 8, 8, 2048) 1048576 add_32[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_116 (BatchN (None, 8, 8, 2048) 8192 conv2d_155[0][0] \n__________________________________________________________________________________________________\nswish_116 (Swish) (None, 8, 8, 2048) 0 batch_normalization_116[0][0] \n__________________________________________________________________________________________________\nglobal_average_pooling2d_1 (Glo (None, 2048) 0 swish_116[0][0] \n__________________________________________________________________________________________________\nfinal_output (Dense) (None, 1) 2049 global_average_pooling2d_1[0][0] \n==================================================================================================\nTotal params: 28,515,569\nTrainable params: 2,049\nNon-trainable params: 28,513,520\n__________________________________________________________________________________________________\n" ], [ "STEP_SIZE_TRAIN = train_generator.n//train_generator.batch_size\nSTEP_SIZE_VALID = valid_generator.n//valid_generator.batch_size\n\nhistory_warmup = model.fit_generator(generator=train_generator,\n steps_per_epoch=STEP_SIZE_TRAIN,\n validation_data=valid_generator,\n validation_steps=STEP_SIZE_VALID,\n epochs=WARMUP_EPOCHS,\n callbacks=callback_list,\n verbose=2).history", "Epoch 1/3\n - 70s - loss: 0.6403 - acc: 0.5775 - val_loss: 0.6992 - val_acc: 0.5875\nEpoch 2/3\n - 57s - loss: 0.6196 - acc: 0.6021 - val_loss: 0.7086 - val_acc: 0.6053\nEpoch 3/3\n - 58s - loss: 0.5809 - acc: 0.6294 - val_loss: 0.6139 - val_acc: 0.6444\n" ] 
], [ [ "# Fine-tune the complete model", "_____no_output_____" ] ], [ [ "for layer in model.layers:\n layer.trainable = True\n\nes = EarlyStopping(monitor='val_loss', mode='min', patience=ES_PATIENCE, restore_best_weights=True, verbose=1)\ncosine_lr_2nd = WarmUpCosineDecayScheduler(learning_rate_base=LEARNING_RATE,\n total_steps=TOTAL_STEPS_2nd,\n warmup_learning_rate=0.0,\n warmup_steps=WARMUP_STEPS_2nd,\n hold_base_rate_steps=(2 * STEP_SIZE))\n\ncallback_list = [es, cosine_lr_2nd]\noptimizer = optimizers.Adam(lr=LEARNING_RATE)\nmodel.compile(optimizer=optimizer, loss='mean_squared_error', metrics=metric_list)\nmodel.summary()", "__________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n==================================================================================================\ninput_1 (InputLayer) (None, 256, 256, 3) 0 \n__________________________________________________________________________________________________\nconv2d_1 (Conv2D) (None, 128, 128, 48) 1296 input_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_1 (BatchNor (None, 128, 128, 48) 192 conv2d_1[0][0] \n__________________________________________________________________________________________________\nswish_1 (Swish) (None, 128, 128, 48) 0 batch_normalization_1[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_1 (DepthwiseCo (None, 128, 128, 48) 432 swish_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_2 (BatchNor (None, 128, 128, 48) 192 depthwise_conv2d_1[0][0] \n__________________________________________________________________________________________________\nswish_2 (Swish) (None, 128, 128, 48) 0 batch_normalization_2[0][0] 
\n__________________________________________________________________________________________________\nlambda_1 (Lambda) (None, 1, 1, 48) 0 swish_2[0][0] \n__________________________________________________________________________________________________\nconv2d_2 (Conv2D) (None, 1, 1, 12) 588 lambda_1[0][0] \n__________________________________________________________________________________________________\nswish_3 (Swish) (None, 1, 1, 12) 0 conv2d_2[0][0] \n__________________________________________________________________________________________________\nconv2d_3 (Conv2D) (None, 1, 1, 48) 624 swish_3[0][0] \n__________________________________________________________________________________________________\nactivation_1 (Activation) (None, 1, 1, 48) 0 conv2d_3[0][0] \n__________________________________________________________________________________________________\nmultiply_1 (Multiply) (None, 128, 128, 48) 0 activation_1[0][0] \n swish_2[0][0] \n__________________________________________________________________________________________________\nconv2d_4 (Conv2D) (None, 128, 128, 24) 1152 multiply_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_3 (BatchNor (None, 128, 128, 24) 96 conv2d_4[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_2 (DepthwiseCo (None, 128, 128, 24) 216 batch_normalization_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_4 (BatchNor (None, 128, 128, 24) 96 depthwise_conv2d_2[0][0] \n__________________________________________________________________________________________________\nswish_4 (Swish) (None, 128, 128, 24) 0 batch_normalization_4[0][0] \n__________________________________________________________________________________________________\nlambda_2 (Lambda) (None, 1, 1, 24) 0 swish_4[0][0] 
\n__________________________________________________________________________________________________\nconv2d_5 (Conv2D) (None, 1, 1, 6) 150 lambda_2[0][0] \n__________________________________________________________________________________________________\nswish_5 (Swish) (None, 1, 1, 6) 0 conv2d_5[0][0] \n__________________________________________________________________________________________________\nconv2d_6 (Conv2D) (None, 1, 1, 24) 168 swish_5[0][0] \n__________________________________________________________________________________________________\nactivation_2 (Activation) (None, 1, 1, 24) 0 conv2d_6[0][0] \n__________________________________________________________________________________________________\nmultiply_2 (Multiply) (None, 128, 128, 24) 0 activation_2[0][0] \n swish_4[0][0] \n__________________________________________________________________________________________________\nconv2d_7 (Conv2D) (None, 128, 128, 24) 576 multiply_2[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_5 (BatchNor (None, 128, 128, 24) 96 conv2d_7[0][0] \n__________________________________________________________________________________________________\ndrop_connect_1 (DropConnect) (None, 128, 128, 24) 0 batch_normalization_5[0][0] \n__________________________________________________________________________________________________\nadd_1 (Add) (None, 128, 128, 24) 0 drop_connect_1[0][0] \n batch_normalization_3[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_3 (DepthwiseCo (None, 128, 128, 24) 216 add_1[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_6 (BatchNor (None, 128, 128, 24) 96 depthwise_conv2d_3[0][0] \n__________________________________________________________________________________________________\nswish_6 (Swish) (None, 128, 
128, 24) 0 batch_normalization_6[0][0] \n__________________________________________________________________________________________________\nlambda_3 (Lambda) (None, 1, 1, 24) 0 swish_6[0][0] \n__________________________________________________________________________________________________\nconv2d_8 (Conv2D) (None, 1, 1, 6) 150 lambda_3[0][0] \n__________________________________________________________________________________________________\nswish_7 (Swish) (None, 1, 1, 6) 0 conv2d_8[0][0] \n__________________________________________________________________________________________________\nconv2d_9 (Conv2D) (None, 1, 1, 24) 168 swish_7[0][0] \n__________________________________________________________________________________________________\nactivation_3 (Activation) (None, 1, 1, 24) 0 conv2d_9[0][0] \n__________________________________________________________________________________________________\nmultiply_3 (Multiply) (None, 128, 128, 24) 0 activation_3[0][0] \n swish_6[0][0] \n__________________________________________________________________________________________________\nconv2d_10 (Conv2D) (None, 128, 128, 24) 576 multiply_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_7 (BatchNor (None, 128, 128, 24) 96 conv2d_10[0][0] \n__________________________________________________________________________________________________\ndrop_connect_2 (DropConnect) (None, 128, 128, 24) 0 batch_normalization_7[0][0] \n__________________________________________________________________________________________________\nadd_2 (Add) (None, 128, 128, 24) 0 drop_connect_2[0][0] \n add_1[0][0] \n__________________________________________________________________________________________________\nconv2d_11 (Conv2D) (None, 128, 128, 144 3456 add_2[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_8 (BatchNor (None, 128, 128, 
144 576 conv2d_11[0][0] \n__________________________________________________________________________________________________\nswish_8 (Swish) (None, 128, 128, 144 0 batch_normalization_8[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_4 (DepthwiseCo (None, 64, 64, 144) 1296 swish_8[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_9 (BatchNor (None, 64, 64, 144) 576 depthwise_conv2d_4[0][0] \n__________________________________________________________________________________________________\nswish_9 (Swish) (None, 64, 64, 144) 0 batch_normalization_9[0][0] \n__________________________________________________________________________________________________\nlambda_4 (Lambda) (None, 1, 1, 144) 0 swish_9[0][0] \n__________________________________________________________________________________________________\nconv2d_12 (Conv2D) (None, 1, 1, 6) 870 lambda_4[0][0] \n__________________________________________________________________________________________________\nswish_10 (Swish) (None, 1, 1, 6) 0 conv2d_12[0][0] \n__________________________________________________________________________________________________\nconv2d_13 (Conv2D) (None, 1, 1, 144) 1008 swish_10[0][0] \n__________________________________________________________________________________________________\nactivation_4 (Activation) (None, 1, 1, 144) 0 conv2d_13[0][0] \n__________________________________________________________________________________________________\nmultiply_4 (Multiply) (None, 64, 64, 144) 0 activation_4[0][0] \n swish_9[0][0] \n__________________________________________________________________________________________________\nconv2d_14 (Conv2D) (None, 64, 64, 40) 5760 multiply_4[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_10 (BatchNo (None, 64, 64, 40) 
160 conv2d_14[0][0] \n__________________________________________________________________________________________________\nconv2d_15 (Conv2D) (None, 64, 64, 240) 9600 batch_normalization_10[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_11 (BatchNo (None, 64, 64, 240) 960 conv2d_15[0][0] \n__________________________________________________________________________________________________\nswish_11 (Swish) (None, 64, 64, 240) 0 batch_normalization_11[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_5 (DepthwiseCo (None, 64, 64, 240) 2160 swish_11[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_12 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_5[0][0] \n__________________________________________________________________________________________________\nswish_12 (Swish) (None, 64, 64, 240) 0 batch_normalization_12[0][0] \n__________________________________________________________________________________________________\nlambda_5 (Lambda) (None, 1, 1, 240) 0 swish_12[0][0] \n__________________________________________________________________________________________________\nconv2d_16 (Conv2D) (None, 1, 1, 10) 2410 lambda_5[0][0] \n__________________________________________________________________________________________________\nswish_13 (Swish) (None, 1, 1, 10) 0 conv2d_16[0][0] \n__________________________________________________________________________________________________\nconv2d_17 (Conv2D) (None, 1, 1, 240) 2640 swish_13[0][0] \n__________________________________________________________________________________________________\nactivation_5 (Activation) (None, 1, 1, 240) 0 conv2d_17[0][0] \n__________________________________________________________________________________________________\nmultiply_5 (Multiply) (None, 64, 64, 240) 
0 activation_5[0][0] \n swish_12[0][0] \n__________________________________________________________________________________________________\nconv2d_18 (Conv2D) (None, 64, 64, 40) 9600 multiply_5[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_13 (BatchNo (None, 64, 64, 40) 160 conv2d_18[0][0] \n__________________________________________________________________________________________________\ndrop_connect_3 (DropConnect) (None, 64, 64, 40) 0 batch_normalization_13[0][0] \n__________________________________________________________________________________________________\nadd_3 (Add) (None, 64, 64, 40) 0 drop_connect_3[0][0] \n batch_normalization_10[0][0] \n__________________________________________________________________________________________________\nconv2d_19 (Conv2D) (None, 64, 64, 240) 9600 add_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_14 (BatchNo (None, 64, 64, 240) 960 conv2d_19[0][0] \n__________________________________________________________________________________________________\nswish_14 (Swish) (None, 64, 64, 240) 0 batch_normalization_14[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_6 (DepthwiseCo (None, 64, 64, 240) 2160 swish_14[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_15 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_6[0][0] \n__________________________________________________________________________________________________\nswish_15 (Swish) (None, 64, 64, 240) 0 batch_normalization_15[0][0] \n__________________________________________________________________________________________________\nlambda_6 (Lambda) (None, 1, 1, 240) 0 swish_15[0][0] 
\n__________________________________________________________________________________________________\nconv2d_20 (Conv2D) (None, 1, 1, 10) 2410 lambda_6[0][0] \n__________________________________________________________________________________________________\nswish_16 (Swish) (None, 1, 1, 10) 0 conv2d_20[0][0] \n__________________________________________________________________________________________________\nconv2d_21 (Conv2D) (None, 1, 1, 240) 2640 swish_16[0][0] \n__________________________________________________________________________________________________\nactivation_6 (Activation) (None, 1, 1, 240) 0 conv2d_21[0][0] \n__________________________________________________________________________________________________\nmultiply_6 (Multiply) (None, 64, 64, 240) 0 activation_6[0][0] \n swish_15[0][0] \n__________________________________________________________________________________________________\nconv2d_22 (Conv2D) (None, 64, 64, 40) 9600 multiply_6[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_16 (BatchNo (None, 64, 64, 40) 160 conv2d_22[0][0] \n__________________________________________________________________________________________________\ndrop_connect_4 (DropConnect) (None, 64, 64, 40) 0 batch_normalization_16[0][0] \n__________________________________________________________________________________________________\nadd_4 (Add) (None, 64, 64, 40) 0 drop_connect_4[0][0] \n add_3[0][0] \n__________________________________________________________________________________________________\nconv2d_23 (Conv2D) (None, 64, 64, 240) 9600 add_4[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_17 (BatchNo (None, 64, 64, 240) 960 conv2d_23[0][0] \n__________________________________________________________________________________________________\nswish_17 (Swish) (None, 64, 64, 240) 0 
batch_normalization_17[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_7 (DepthwiseCo (None, 64, 64, 240) 2160 swish_17[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_18 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_7[0][0] \n__________________________________________________________________________________________________\nswish_18 (Swish) (None, 64, 64, 240) 0 batch_normalization_18[0][0] \n__________________________________________________________________________________________________\nlambda_7 (Lambda) (None, 1, 1, 240) 0 swish_18[0][0] \n__________________________________________________________________________________________________\nconv2d_24 (Conv2D) (None, 1, 1, 10) 2410 lambda_7[0][0] \n__________________________________________________________________________________________________\nswish_19 (Swish) (None, 1, 1, 10) 0 conv2d_24[0][0] \n__________________________________________________________________________________________________\nconv2d_25 (Conv2D) (None, 1, 1, 240) 2640 swish_19[0][0] \n__________________________________________________________________________________________________\nactivation_7 (Activation) (None, 1, 1, 240) 0 conv2d_25[0][0] \n__________________________________________________________________________________________________\nmultiply_7 (Multiply) (None, 64, 64, 240) 0 activation_7[0][0] \n swish_18[0][0] \n__________________________________________________________________________________________________\nconv2d_26 (Conv2D) (None, 64, 64, 40) 9600 multiply_7[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_19 (BatchNo (None, 64, 64, 40) 160 conv2d_26[0][0] \n__________________________________________________________________________________________________\ndrop_connect_5 (DropConnect) 
(None, 64, 64, 40) 0 batch_normalization_19[0][0] \n__________________________________________________________________________________________________\nadd_5 (Add) (None, 64, 64, 40) 0 drop_connect_5[0][0] \n add_4[0][0] \n__________________________________________________________________________________________________\nconv2d_27 (Conv2D) (None, 64, 64, 240) 9600 add_5[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_20 (BatchNo (None, 64, 64, 240) 960 conv2d_27[0][0] \n__________________________________________________________________________________________________\nswish_20 (Swish) (None, 64, 64, 240) 0 batch_normalization_20[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_8 (DepthwiseCo (None, 64, 64, 240) 2160 swish_20[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_21 (BatchNo (None, 64, 64, 240) 960 depthwise_conv2d_8[0][0] \n__________________________________________________________________________________________________\nswish_21 (Swish) (None, 64, 64, 240) 0 batch_normalization_21[0][0] \n__________________________________________________________________________________________________\nlambda_8 (Lambda) (None, 1, 1, 240) 0 swish_21[0][0] \n__________________________________________________________________________________________________\nconv2d_28 (Conv2D) (None, 1, 1, 10) 2410 lambda_8[0][0] \n__________________________________________________________________________________________________\nswish_22 (Swish) (None, 1, 1, 10) 0 conv2d_28[0][0] \n__________________________________________________________________________________________________\nconv2d_29 (Conv2D) (None, 1, 1, 240) 2640 swish_22[0][0] \n__________________________________________________________________________________________________\nactivation_8 
(Activation) (None, 1, 1, 240) 0 conv2d_29[0][0] \n__________________________________________________________________________________________________\nmultiply_8 (Multiply) (None, 64, 64, 240) 0 activation_8[0][0] \n swish_21[0][0] \n__________________________________________________________________________________________________\nconv2d_30 (Conv2D) (None, 64, 64, 40) 9600 multiply_8[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_22 (BatchNo (None, 64, 64, 40) 160 conv2d_30[0][0] \n__________________________________________________________________________________________________\ndrop_connect_6 (DropConnect) (None, 64, 64, 40) 0 batch_normalization_22[0][0] \n__________________________________________________________________________________________________\nadd_6 (Add) (None, 64, 64, 40) 0 drop_connect_6[0][0] \n add_5[0][0] \n__________________________________________________________________________________________________\nconv2d_31 (Conv2D) (None, 64, 64, 240) 9600 add_6[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_23 (BatchNo (None, 64, 64, 240) 960 conv2d_31[0][0] \n__________________________________________________________________________________________________\nswish_23 (Swish) (None, 64, 64, 240) 0 batch_normalization_23[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_9 (DepthwiseCo (None, 32, 32, 240) 6000 swish_23[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_24 (BatchNo (None, 32, 32, 240) 960 depthwise_conv2d_9[0][0] \n__________________________________________________________________________________________________\nswish_24 (Swish) (None, 32, 32, 240) 0 batch_normalization_24[0][0] 
\n__________________________________________________________________________________________________\nlambda_9 (Lambda) (None, 1, 1, 240) 0 swish_24[0][0] \n__________________________________________________________________________________________________\nconv2d_32 (Conv2D) (None, 1, 1, 10) 2410 lambda_9[0][0] \n__________________________________________________________________________________________________\nswish_25 (Swish) (None, 1, 1, 10) 0 conv2d_32[0][0] \n__________________________________________________________________________________________________\nconv2d_33 (Conv2D) (None, 1, 1, 240) 2640 swish_25[0][0] \n__________________________________________________________________________________________________\nactivation_9 (Activation) (None, 1, 1, 240) 0 conv2d_33[0][0] \n__________________________________________________________________________________________________\nmultiply_9 (Multiply) (None, 32, 32, 240) 0 activation_9[0][0] \n swish_24[0][0] \n__________________________________________________________________________________________________\nconv2d_34 (Conv2D) (None, 32, 32, 64) 15360 multiply_9[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_25 (BatchNo (None, 32, 32, 64) 256 conv2d_34[0][0] \n__________________________________________________________________________________________________\nconv2d_35 (Conv2D) (None, 32, 32, 384) 24576 batch_normalization_25[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_26 (BatchNo (None, 32, 32, 384) 1536 conv2d_35[0][0] \n__________________________________________________________________________________________________\nswish_26 (Swish) (None, 32, 32, 384) 0 batch_normalization_26[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_10 (DepthwiseC (None, 32, 32, 384) 9600 
swish_26[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_27 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_10[0][0] \n__________________________________________________________________________________________________\nswish_27 (Swish) (None, 32, 32, 384) 0 batch_normalization_27[0][0] \n__________________________________________________________________________________________________\nlambda_10 (Lambda) (None, 1, 1, 384) 0 swish_27[0][0] \n__________________________________________________________________________________________________\nconv2d_36 (Conv2D) (None, 1, 1, 16) 6160 lambda_10[0][0] \n__________________________________________________________________________________________________\nswish_28 (Swish) (None, 1, 1, 16) 0 conv2d_36[0][0] \n__________________________________________________________________________________________________\nconv2d_37 (Conv2D) (None, 1, 1, 384) 6528 swish_28[0][0] \n__________________________________________________________________________________________________\nactivation_10 (Activation) (None, 1, 1, 384) 0 conv2d_37[0][0] \n__________________________________________________________________________________________________\nmultiply_10 (Multiply) (None, 32, 32, 384) 0 activation_10[0][0] \n swish_27[0][0] \n__________________________________________________________________________________________________\nconv2d_38 (Conv2D) (None, 32, 32, 64) 24576 multiply_10[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_28 (BatchNo (None, 32, 32, 64) 256 conv2d_38[0][0] \n__________________________________________________________________________________________________\ndrop_connect_7 (DropConnect) (None, 32, 32, 64) 0 batch_normalization_28[0][0] \n__________________________________________________________________________________________________\nadd_7 (Add) (None, 32, 32, 64) 
0 drop_connect_7[0][0] \n batch_normalization_25[0][0] \n__________________________________________________________________________________________________\nconv2d_39 (Conv2D) (None, 32, 32, 384) 24576 add_7[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_29 (BatchNo (None, 32, 32, 384) 1536 conv2d_39[0][0] \n__________________________________________________________________________________________________\nswish_29 (Swish) (None, 32, 32, 384) 0 batch_normalization_29[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_11 (DepthwiseC (None, 32, 32, 384) 9600 swish_29[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_30 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_11[0][0] \n__________________________________________________________________________________________________\nswish_30 (Swish) (None, 32, 32, 384) 0 batch_normalization_30[0][0] \n__________________________________________________________________________________________________\nlambda_11 (Lambda) (None, 1, 1, 384) 0 swish_30[0][0] \n__________________________________________________________________________________________________\nconv2d_40 (Conv2D) (None, 1, 1, 16) 6160 lambda_11[0][0] \n__________________________________________________________________________________________________\nswish_31 (Swish) (None, 1, 1, 16) 0 conv2d_40[0][0] \n__________________________________________________________________________________________________\nconv2d_41 (Conv2D) (None, 1, 1, 384) 6528 swish_31[0][0] \n__________________________________________________________________________________________________\nactivation_11 (Activation) (None, 1, 1, 384) 0 conv2d_41[0][0] \n__________________________________________________________________________________________________\nmultiply_11 
(Multiply) (None, 32, 32, 384) 0 activation_11[0][0] \n swish_30[0][0] \n__________________________________________________________________________________________________\nconv2d_42 (Conv2D) (None, 32, 32, 64) 24576 multiply_11[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_31 (BatchNo (None, 32, 32, 64) 256 conv2d_42[0][0] \n__________________________________________________________________________________________________\ndrop_connect_8 (DropConnect) (None, 32, 32, 64) 0 batch_normalization_31[0][0] \n__________________________________________________________________________________________________\nadd_8 (Add) (None, 32, 32, 64) 0 drop_connect_8[0][0] \n add_7[0][0] \n__________________________________________________________________________________________________\nconv2d_43 (Conv2D) (None, 32, 32, 384) 24576 add_8[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_32 (BatchNo (None, 32, 32, 384) 1536 conv2d_43[0][0] \n__________________________________________________________________________________________________\nswish_32 (Swish) (None, 32, 32, 384) 0 batch_normalization_32[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_12 (DepthwiseC (None, 32, 32, 384) 9600 swish_32[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_33 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_12[0][0] \n__________________________________________________________________________________________________\nswish_33 (Swish) (None, 32, 32, 384) 0 batch_normalization_33[0][0] \n__________________________________________________________________________________________________\nlambda_12 (Lambda) (None, 1, 1, 384) 0 swish_33[0][0] 
\n__________________________________________________________________________________________________\nconv2d_44 (Conv2D) (None, 1, 1, 16) 6160 lambda_12[0][0] \n__________________________________________________________________________________________________\nswish_34 (Swish) (None, 1, 1, 16) 0 conv2d_44[0][0] \n__________________________________________________________________________________________________\nconv2d_45 (Conv2D) (None, 1, 1, 384) 6528 swish_34[0][0] \n__________________________________________________________________________________________________\nactivation_12 (Activation) (None, 1, 1, 384) 0 conv2d_45[0][0] \n__________________________________________________________________________________________________\nmultiply_12 (Multiply) (None, 32, 32, 384) 0 activation_12[0][0] \n swish_33[0][0] \n__________________________________________________________________________________________________\nconv2d_46 (Conv2D) (None, 32, 32, 64) 24576 multiply_12[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_34 (BatchNo (None, 32, 32, 64) 256 conv2d_46[0][0] \n__________________________________________________________________________________________________\ndrop_connect_9 (DropConnect) (None, 32, 32, 64) 0 batch_normalization_34[0][0] \n__________________________________________________________________________________________________\nadd_9 (Add) (None, 32, 32, 64) 0 drop_connect_9[0][0] \n add_8[0][0] \n__________________________________________________________________________________________________\nconv2d_47 (Conv2D) (None, 32, 32, 384) 24576 add_9[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_35 (BatchNo (None, 32, 32, 384) 1536 conv2d_47[0][0] \n__________________________________________________________________________________________________\nswish_35 (Swish) (None, 32, 32, 384) 0 
batch_normalization_35[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_13 (DepthwiseC (None, 32, 32, 384) 9600 swish_35[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_36 (BatchNo (None, 32, 32, 384) 1536 depthwise_conv2d_13[0][0] \n__________________________________________________________________________________________________\nswish_36 (Swish) (None, 32, 32, 384) 0 batch_normalization_36[0][0] \n__________________________________________________________________________________________________\nlambda_13 (Lambda) (None, 1, 1, 384) 0 swish_36[0][0] \n__________________________________________________________________________________________________\nconv2d_48 (Conv2D) (None, 1, 1, 16) 6160 lambda_13[0][0] \n__________________________________________________________________________________________________\nswish_37 (Swish) (None, 1, 1, 16) 0 conv2d_48[0][0] \n__________________________________________________________________________________________________\nconv2d_49 (Conv2D) (None, 1, 1, 384) 6528 swish_37[0][0] \n__________________________________________________________________________________________________\nactivation_13 (Activation) (None, 1, 1, 384) 0 conv2d_49[0][0] \n__________________________________________________________________________________________________\nmultiply_13 (Multiply) (None, 32, 32, 384) 0 activation_13[0][0] \n swish_36[0][0] \n__________________________________________________________________________________________________\nconv2d_50 (Conv2D) (None, 32, 32, 64) 24576 multiply_13[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_37 (BatchNo (None, 32, 32, 64) 256 conv2d_50[0][0] \n__________________________________________________________________________________________________\ndrop_connect_10 
(DropConnect) (None, 32, 32, 64) 0 batch_normalization_37[0][0] \n__________________________________________________________________________________________________\nadd_10 (Add) (None, 32, 32, 64) 0 drop_connect_10[0][0] \n add_9[0][0] \n__________________________________________________________________________________________________\nconv2d_51 (Conv2D) (None, 32, 32, 384) 24576 add_10[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_38 (BatchNo (None, 32, 32, 384) 1536 conv2d_51[0][0] \n__________________________________________________________________________________________________\nswish_38 (Swish) (None, 32, 32, 384) 0 batch_normalization_38[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_14 (DepthwiseC (None, 16, 16, 384) 3456 swish_38[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_39 (BatchNo (None, 16, 16, 384) 1536 depthwise_conv2d_14[0][0] \n__________________________________________________________________________________________________\nswish_39 (Swish) (None, 16, 16, 384) 0 batch_normalization_39[0][0] \n__________________________________________________________________________________________________\nlambda_14 (Lambda) (None, 1, 1, 384) 0 swish_39[0][0] \n__________________________________________________________________________________________________\nconv2d_52 (Conv2D) (None, 1, 1, 16) 6160 lambda_14[0][0] \n__________________________________________________________________________________________________\nswish_40 (Swish) (None, 1, 1, 16) 0 conv2d_52[0][0] \n__________________________________________________________________________________________________\nconv2d_53 (Conv2D) (None, 1, 1, 384) 6528 swish_40[0][0] 
\n__________________________________________________________________________________________________\nactivation_14 (Activation) (None, 1, 1, 384) 0 conv2d_53[0][0] \n__________________________________________________________________________________________________\nmultiply_14 (Multiply) (None, 16, 16, 384) 0 activation_14[0][0] \n swish_39[0][0] \n__________________________________________________________________________________________________\nconv2d_54 (Conv2D) (None, 16, 16, 128) 49152 multiply_14[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_40 (BatchNo (None, 16, 16, 128) 512 conv2d_54[0][0] \n__________________________________________________________________________________________________\nconv2d_55 (Conv2D) (None, 16, 16, 768) 98304 batch_normalization_40[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_41 (BatchNo (None, 16, 16, 768) 3072 conv2d_55[0][0] \n__________________________________________________________________________________________________\nswish_41 (Swish) (None, 16, 16, 768) 0 batch_normalization_41[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_15 (DepthwiseC (None, 16, 16, 768) 6912 swish_41[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_42 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_15[0][0] \n__________________________________________________________________________________________________\nswish_42 (Swish) (None, 16, 16, 768) 0 batch_normalization_42[0][0] \n__________________________________________________________________________________________________\nlambda_15 (Lambda) (None, 1, 1, 768) 0 swish_42[0][0] 
\n__________________________________________________________________________________________________\nconv2d_56 (Conv2D) (None, 1, 1, 32) 24608 lambda_15[0][0] \n__________________________________________________________________________________________________\nswish_43 (Swish) (None, 1, 1, 32) 0 conv2d_56[0][0] \n__________________________________________________________________________________________________\nconv2d_57 (Conv2D) (None, 1, 1, 768) 25344 swish_43[0][0] \n__________________________________________________________________________________________________\nactivation_15 (Activation) (None, 1, 1, 768) 0 conv2d_57[0][0] \n__________________________________________________________________________________________________\nmultiply_15 (Multiply) (None, 16, 16, 768) 0 activation_15[0][0] \n swish_42[0][0] \n__________________________________________________________________________________________________\nconv2d_58 (Conv2D) (None, 16, 16, 128) 98304 multiply_15[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_43 (BatchNo (None, 16, 16, 128) 512 conv2d_58[0][0] \n__________________________________________________________________________________________________\ndrop_connect_11 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_43[0][0] \n__________________________________________________________________________________________________\nadd_11 (Add) (None, 16, 16, 128) 0 drop_connect_11[0][0] \n batch_normalization_40[0][0] \n__________________________________________________________________________________________________\nconv2d_59 (Conv2D) (None, 16, 16, 768) 98304 add_11[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_44 (BatchNo (None, 16, 16, 768) 3072 conv2d_59[0][0] \n__________________________________________________________________________________________________\nswish_44 (Swish) (None, 
16, 16, 768) 0 batch_normalization_44[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_16 (DepthwiseC (None, 16, 16, 768) 6912 swish_44[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_45 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_16[0][0] \n__________________________________________________________________________________________________\nswish_45 (Swish) (None, 16, 16, 768) 0 batch_normalization_45[0][0] \n__________________________________________________________________________________________________\nlambda_16 (Lambda) (None, 1, 1, 768) 0 swish_45[0][0] \n__________________________________________________________________________________________________\nconv2d_60 (Conv2D) (None, 1, 1, 32) 24608 lambda_16[0][0] \n__________________________________________________________________________________________________\nswish_46 (Swish) (None, 1, 1, 32) 0 conv2d_60[0][0] \n__________________________________________________________________________________________________\nconv2d_61 (Conv2D) (None, 1, 1, 768) 25344 swish_46[0][0] \n__________________________________________________________________________________________________\nactivation_16 (Activation) (None, 1, 1, 768) 0 conv2d_61[0][0] \n__________________________________________________________________________________________________\nmultiply_16 (Multiply) (None, 16, 16, 768) 0 activation_16[0][0] \n swish_45[0][0] \n__________________________________________________________________________________________________\nconv2d_62 (Conv2D) (None, 16, 16, 128) 98304 multiply_16[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_46 (BatchNo (None, 16, 16, 128) 512 conv2d_62[0][0] 
\n__________________________________________________________________________________________________\ndrop_connect_12 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_46[0][0] \n__________________________________________________________________________________________________\nadd_12 (Add) (None, 16, 16, 128) 0 drop_connect_12[0][0] \n add_11[0][0] \n__________________________________________________________________________________________________\nconv2d_63 (Conv2D) (None, 16, 16, 768) 98304 add_12[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_47 (BatchNo (None, 16, 16, 768) 3072 conv2d_63[0][0] \n__________________________________________________________________________________________________\nswish_47 (Swish) (None, 16, 16, 768) 0 batch_normalization_47[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_17 (DepthwiseC (None, 16, 16, 768) 6912 swish_47[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_48 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_17[0][0] \n__________________________________________________________________________________________________\nswish_48 (Swish) (None, 16, 16, 768) 0 batch_normalization_48[0][0] \n__________________________________________________________________________________________________\nlambda_17 (Lambda) (None, 1, 1, 768) 0 swish_48[0][0] \n__________________________________________________________________________________________________\nconv2d_64 (Conv2D) (None, 1, 1, 32) 24608 lambda_17[0][0] \n__________________________________________________________________________________________________\nswish_49 (Swish) (None, 1, 1, 32) 0 conv2d_64[0][0] \n__________________________________________________________________________________________________\nconv2d_65 (Conv2D) (None, 1, 1, 
768) 25344 swish_49[0][0] \n__________________________________________________________________________________________________\nactivation_17 (Activation) (None, 1, 1, 768) 0 conv2d_65[0][0] \n__________________________________________________________________________________________________\nmultiply_17 (Multiply) (None, 16, 16, 768) 0 activation_17[0][0] \n swish_48[0][0] \n__________________________________________________________________________________________________\nconv2d_66 (Conv2D) (None, 16, 16, 128) 98304 multiply_17[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_49 (BatchNo (None, 16, 16, 128) 512 conv2d_66[0][0] \n__________________________________________________________________________________________________\ndrop_connect_13 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_49[0][0] \n__________________________________________________________________________________________________\nadd_13 (Add) (None, 16, 16, 128) 0 drop_connect_13[0][0] \n add_12[0][0] \n__________________________________________________________________________________________________\nconv2d_67 (Conv2D) (None, 16, 16, 768) 98304 add_13[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_50 (BatchNo (None, 16, 16, 768) 3072 conv2d_67[0][0] \n__________________________________________________________________________________________________\nswish_50 (Swish) (None, 16, 16, 768) 0 batch_normalization_50[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_18 (DepthwiseC (None, 16, 16, 768) 6912 swish_50[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_51 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_18[0][0] 
\n__________________________________________________________________________________________________\nswish_51 (Swish) (None, 16, 16, 768) 0 batch_normalization_51[0][0] \n__________________________________________________________________________________________________\nlambda_18 (Lambda) (None, 1, 1, 768) 0 swish_51[0][0] \n__________________________________________________________________________________________________\nconv2d_68 (Conv2D) (None, 1, 1, 32) 24608 lambda_18[0][0] \n__________________________________________________________________________________________________\nswish_52 (Swish) (None, 1, 1, 32) 0 conv2d_68[0][0] \n__________________________________________________________________________________________________\nconv2d_69 (Conv2D) (None, 1, 1, 768) 25344 swish_52[0][0] \n__________________________________________________________________________________________________\nactivation_18 (Activation) (None, 1, 1, 768) 0 conv2d_69[0][0] \n__________________________________________________________________________________________________\nmultiply_18 (Multiply) (None, 16, 16, 768) 0 activation_18[0][0] \n swish_51[0][0] \n__________________________________________________________________________________________________\nconv2d_70 (Conv2D) (None, 16, 16, 128) 98304 multiply_18[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_52 (BatchNo (None, 16, 16, 128) 512 conv2d_70[0][0] \n__________________________________________________________________________________________________\ndrop_connect_14 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_52[0][0] \n__________________________________________________________________________________________________\nadd_14 (Add) (None, 16, 16, 128) 0 drop_connect_14[0][0] \n add_13[0][0] \n__________________________________________________________________________________________________\nconv2d_71 (Conv2D) (None, 16, 16, 768) 98304 
add_14[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_53 (BatchNo (None, 16, 16, 768) 3072 conv2d_71[0][0] \n__________________________________________________________________________________________________\nswish_53 (Swish) (None, 16, 16, 768) 0 batch_normalization_53[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_19 (DepthwiseC (None, 16, 16, 768) 6912 swish_53[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_54 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_19[0][0] \n__________________________________________________________________________________________________\nswish_54 (Swish) (None, 16, 16, 768) 0 batch_normalization_54[0][0] \n__________________________________________________________________________________________________\nlambda_19 (Lambda) (None, 1, 1, 768) 0 swish_54[0][0] \n__________________________________________________________________________________________________\nconv2d_72 (Conv2D) (None, 1, 1, 32) 24608 lambda_19[0][0] \n__________________________________________________________________________________________________\nswish_55 (Swish) (None, 1, 1, 32) 0 conv2d_72[0][0] \n__________________________________________________________________________________________________\nconv2d_73 (Conv2D) (None, 1, 1, 768) 25344 swish_55[0][0] \n__________________________________________________________________________________________________\nactivation_19 (Activation) (None, 1, 1, 768) 0 conv2d_73[0][0] \n__________________________________________________________________________________________________\nmultiply_19 (Multiply) (None, 16, 16, 768) 0 activation_19[0][0] \n swish_54[0][0] \n__________________________________________________________________________________________________\nconv2d_74 (Conv2D) (None, 16, 
16, 128) 98304 multiply_19[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_55 (BatchNo (None, 16, 16, 128) 512 conv2d_74[0][0] \n__________________________________________________________________________________________________\ndrop_connect_15 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_55[0][0] \n__________________________________________________________________________________________________\nadd_15 (Add) (None, 16, 16, 128) 0 drop_connect_15[0][0] \n add_14[0][0] \n__________________________________________________________________________________________________\nconv2d_75 (Conv2D) (None, 16, 16, 768) 98304 add_15[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_56 (BatchNo (None, 16, 16, 768) 3072 conv2d_75[0][0] \n__________________________________________________________________________________________________\nswish_56 (Swish) (None, 16, 16, 768) 0 batch_normalization_56[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_20 (DepthwiseC (None, 16, 16, 768) 6912 swish_56[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_57 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_20[0][0] \n__________________________________________________________________________________________________\nswish_57 (Swish) (None, 16, 16, 768) 0 batch_normalization_57[0][0] \n__________________________________________________________________________________________________\nlambda_20 (Lambda) (None, 1, 1, 768) 0 swish_57[0][0] \n__________________________________________________________________________________________________\nconv2d_76 (Conv2D) (None, 1, 1, 32) 24608 lambda_20[0][0] 
\n__________________________________________________________________________________________________\nswish_58 (Swish) (None, 1, 1, 32) 0 conv2d_76[0][0] \n__________________________________________________________________________________________________\nconv2d_77 (Conv2D) (None, 1, 1, 768) 25344 swish_58[0][0] \n__________________________________________________________________________________________________\nactivation_20 (Activation) (None, 1, 1, 768) 0 conv2d_77[0][0] \n__________________________________________________________________________________________________\nmultiply_20 (Multiply) (None, 16, 16, 768) 0 activation_20[0][0] \n swish_57[0][0] \n__________________________________________________________________________________________________\nconv2d_78 (Conv2D) (None, 16, 16, 128) 98304 multiply_20[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_58 (BatchNo (None, 16, 16, 128) 512 conv2d_78[0][0] \n__________________________________________________________________________________________________\ndrop_connect_16 (DropConnect) (None, 16, 16, 128) 0 batch_normalization_58[0][0] \n__________________________________________________________________________________________________\nadd_16 (Add) (None, 16, 16, 128) 0 drop_connect_16[0][0] \n add_15[0][0] \n__________________________________________________________________________________________________\nconv2d_79 (Conv2D) (None, 16, 16, 768) 98304 add_16[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_59 (BatchNo (None, 16, 16, 768) 3072 conv2d_79[0][0] \n__________________________________________________________________________________________________\nswish_59 (Swish) (None, 16, 16, 768) 0 batch_normalization_59[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_21 (DepthwiseC 
(None, 16, 16, 768) 19200 swish_59[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_60 (BatchNo (None, 16, 16, 768) 3072 depthwise_conv2d_21[0][0] \n__________________________________________________________________________________________________\nswish_60 (Swish) (None, 16, 16, 768) 0 batch_normalization_60[0][0] \n__________________________________________________________________________________________________\nlambda_21 (Lambda) (None, 1, 1, 768) 0 swish_60[0][0] \n__________________________________________________________________________________________________\nconv2d_80 (Conv2D) (None, 1, 1, 32) 24608 lambda_21[0][0] \n__________________________________________________________________________________________________\nswish_61 (Swish) (None, 1, 1, 32) 0 conv2d_80[0][0] \n__________________________________________________________________________________________________\nconv2d_81 (Conv2D) (None, 1, 1, 768) 25344 swish_61[0][0] \n__________________________________________________________________________________________________\nactivation_21 (Activation) (None, 1, 1, 768) 0 conv2d_81[0][0] \n__________________________________________________________________________________________________\nmultiply_21 (Multiply) (None, 16, 16, 768) 0 activation_21[0][0] \n swish_60[0][0] \n__________________________________________________________________________________________________\nconv2d_82 (Conv2D) (None, 16, 16, 176) 135168 multiply_21[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_61 (BatchNo (None, 16, 16, 176) 704 conv2d_82[0][0] \n__________________________________________________________________________________________________\nconv2d_83 (Conv2D) (None, 16, 16, 1056) 185856 batch_normalization_61[0][0] 
\n__________________________________________________________________________________________________\nbatch_normalization_62 (BatchNo (None, 16, 16, 1056) 4224 conv2d_83[0][0] \n__________________________________________________________________________________________________\nswish_62 (Swish) (None, 16, 16, 1056) 0 batch_normalization_62[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_22 (DepthwiseC (None, 16, 16, 1056) 26400 swish_62[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_63 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_22[0][0] \n__________________________________________________________________________________________________\nswish_63 (Swish) (None, 16, 16, 1056) 0 batch_normalization_63[0][0] \n__________________________________________________________________________________________________\nlambda_22 (Lambda) (None, 1, 1, 1056) 0 swish_63[0][0] \n__________________________________________________________________________________________________\nconv2d_84 (Conv2D) (None, 1, 1, 44) 46508 lambda_22[0][0] \n__________________________________________________________________________________________________\nswish_64 (Swish) (None, 1, 1, 44) 0 conv2d_84[0][0] \n__________________________________________________________________________________________________\nconv2d_85 (Conv2D) (None, 1, 1, 1056) 47520 swish_64[0][0] \n__________________________________________________________________________________________________\nactivation_22 (Activation) (None, 1, 1, 1056) 0 conv2d_85[0][0] \n__________________________________________________________________________________________________\nmultiply_22 (Multiply) (None, 16, 16, 1056) 0 activation_22[0][0] \n swish_63[0][0] \n__________________________________________________________________________________________________\nconv2d_86 (Conv2D) (None, 16, 16, 
176) 185856 multiply_22[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_64 (BatchNo (None, 16, 16, 176) 704 conv2d_86[0][0] \n__________________________________________________________________________________________________\ndrop_connect_17 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_64[0][0] \n__________________________________________________________________________________________________\nadd_17 (Add) (None, 16, 16, 176) 0 drop_connect_17[0][0] \n batch_normalization_61[0][0] \n__________________________________________________________________________________________________\nconv2d_87 (Conv2D) (None, 16, 16, 1056) 185856 add_17[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_65 (BatchNo (None, 16, 16, 1056) 4224 conv2d_87[0][0] \n__________________________________________________________________________________________________\nswish_65 (Swish) (None, 16, 16, 1056) 0 batch_normalization_65[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_23 (DepthwiseC (None, 16, 16, 1056) 26400 swish_65[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_66 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_23[0][0] \n__________________________________________________________________________________________________\nswish_66 (Swish) (None, 16, 16, 1056) 0 batch_normalization_66[0][0] \n__________________________________________________________________________________________________\nlambda_23 (Lambda) (None, 1, 1, 1056) 0 swish_66[0][0] \n__________________________________________________________________________________________________\nconv2d_88 (Conv2D) (None, 1, 1, 44) 46508 lambda_23[0][0] 
\n__________________________________________________________________________________________________\nswish_67 (Swish) (None, 1, 1, 44) 0 conv2d_88[0][0] \n__________________________________________________________________________________________________\nconv2d_89 (Conv2D) (None, 1, 1, 1056) 47520 swish_67[0][0] \n__________________________________________________________________________________________________\nactivation_23 (Activation) (None, 1, 1, 1056) 0 conv2d_89[0][0] \n__________________________________________________________________________________________________\nmultiply_23 (Multiply) (None, 16, 16, 1056) 0 activation_23[0][0] \n swish_66[0][0] \n__________________________________________________________________________________________________\nconv2d_90 (Conv2D) (None, 16, 16, 176) 185856 multiply_23[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_67 (BatchNo (None, 16, 16, 176) 704 conv2d_90[0][0] \n__________________________________________________________________________________________________\ndrop_connect_18 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_67[0][0] \n__________________________________________________________________________________________________\nadd_18 (Add) (None, 16, 16, 176) 0 drop_connect_18[0][0] \n add_17[0][0] \n__________________________________________________________________________________________________\nconv2d_91 (Conv2D) (None, 16, 16, 1056) 185856 add_18[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_68 (BatchNo (None, 16, 16, 1056) 4224 conv2d_91[0][0] \n__________________________________________________________________________________________________\nswish_68 (Swish) (None, 16, 16, 1056) 0 batch_normalization_68[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_24 
(DepthwiseC (None, 16, 16, 1056) 26400 swish_68[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_69 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_24[0][0] \n__________________________________________________________________________________________________\nswish_69 (Swish) (None, 16, 16, 1056) 0 batch_normalization_69[0][0] \n__________________________________________________________________________________________________\nlambda_24 (Lambda) (None, 1, 1, 1056) 0 swish_69[0][0] \n__________________________________________________________________________________________________\nconv2d_92 (Conv2D) (None, 1, 1, 44) 46508 lambda_24[0][0] \n__________________________________________________________________________________________________\nswish_70 (Swish) (None, 1, 1, 44) 0 conv2d_92[0][0] \n__________________________________________________________________________________________________\nconv2d_93 (Conv2D) (None, 1, 1, 1056) 47520 swish_70[0][0] \n__________________________________________________________________________________________________\nactivation_24 (Activation) (None, 1, 1, 1056) 0 conv2d_93[0][0] \n__________________________________________________________________________________________________\nmultiply_24 (Multiply) (None, 16, 16, 1056) 0 activation_24[0][0] \n swish_69[0][0] \n__________________________________________________________________________________________________\nconv2d_94 (Conv2D) (None, 16, 16, 176) 185856 multiply_24[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_70 (BatchNo (None, 16, 16, 176) 704 conv2d_94[0][0] \n__________________________________________________________________________________________________\ndrop_connect_19 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_70[0][0] 
\n__________________________________________________________________________________________________\nadd_19 (Add) (None, 16, 16, 176) 0 drop_connect_19[0][0] \n add_18[0][0] \n__________________________________________________________________________________________________\nconv2d_95 (Conv2D) (None, 16, 16, 1056) 185856 add_19[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_71 (BatchNo (None, 16, 16, 1056) 4224 conv2d_95[0][0] \n__________________________________________________________________________________________________\nswish_71 (Swish) (None, 16, 16, 1056) 0 batch_normalization_71[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_25 (DepthwiseC (None, 16, 16, 1056) 26400 swish_71[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_72 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_25[0][0] \n__________________________________________________________________________________________________\nswish_72 (Swish) (None, 16, 16, 1056) 0 batch_normalization_72[0][0] \n__________________________________________________________________________________________________\nlambda_25 (Lambda) (None, 1, 1, 1056) 0 swish_72[0][0] \n__________________________________________________________________________________________________\nconv2d_96 (Conv2D) (None, 1, 1, 44) 46508 lambda_25[0][0] \n__________________________________________________________________________________________________\nswish_73 (Swish) (None, 1, 1, 44) 0 conv2d_96[0][0] \n__________________________________________________________________________________________________\nconv2d_97 (Conv2D) (None, 1, 1, 1056) 47520 swish_73[0][0] \n__________________________________________________________________________________________________\nactivation_25 (Activation) (None, 1, 1, 1056) 0 
conv2d_97[0][0] \n__________________________________________________________________________________________________\nmultiply_25 (Multiply) (None, 16, 16, 1056) 0 activation_25[0][0] \n swish_72[0][0] \n__________________________________________________________________________________________________\nconv2d_98 (Conv2D) (None, 16, 16, 176) 185856 multiply_25[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_73 (BatchNo (None, 16, 16, 176) 704 conv2d_98[0][0] \n__________________________________________________________________________________________________\ndrop_connect_20 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_73[0][0] \n__________________________________________________________________________________________________\nadd_20 (Add) (None, 16, 16, 176) 0 drop_connect_20[0][0] \n add_19[0][0] \n__________________________________________________________________________________________________\nconv2d_99 (Conv2D) (None, 16, 16, 1056) 185856 add_20[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_74 (BatchNo (None, 16, 16, 1056) 4224 conv2d_99[0][0] \n__________________________________________________________________________________________________\nswish_74 (Swish) (None, 16, 16, 1056) 0 batch_normalization_74[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_26 (DepthwiseC (None, 16, 16, 1056) 26400 swish_74[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_75 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_26[0][0] \n__________________________________________________________________________________________________\nswish_75 (Swish) (None, 16, 16, 1056) 0 batch_normalization_75[0][0] 
\n__________________________________________________________________________________________________\nlambda_26 (Lambda) (None, 1, 1, 1056) 0 swish_75[0][0] \n__________________________________________________________________________________________________\nconv2d_100 (Conv2D) (None, 1, 1, 44) 46508 lambda_26[0][0] \n__________________________________________________________________________________________________\nswish_76 (Swish) (None, 1, 1, 44) 0 conv2d_100[0][0] \n__________________________________________________________________________________________________\nconv2d_101 (Conv2D) (None, 1, 1, 1056) 47520 swish_76[0][0] \n__________________________________________________________________________________________________\nactivation_26 (Activation) (None, 1, 1, 1056) 0 conv2d_101[0][0] \n__________________________________________________________________________________________________\nmultiply_26 (Multiply) (None, 16, 16, 1056) 0 activation_26[0][0] \n swish_75[0][0] \n__________________________________________________________________________________________________\nconv2d_102 (Conv2D) (None, 16, 16, 176) 185856 multiply_26[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_76 (BatchNo (None, 16, 16, 176) 704 conv2d_102[0][0] \n__________________________________________________________________________________________________\ndrop_connect_21 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_76[0][0] \n__________________________________________________________________________________________________\nadd_21 (Add) (None, 16, 16, 176) 0 drop_connect_21[0][0] \n add_20[0][0] \n__________________________________________________________________________________________________\nconv2d_103 (Conv2D) (None, 16, 16, 1056) 185856 add_21[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_77 (BatchNo (None, 16, 16, 
1056) 4224 conv2d_103[0][0] \n__________________________________________________________________________________________________\nswish_77 (Swish) (None, 16, 16, 1056) 0 batch_normalization_77[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_27 (DepthwiseC (None, 16, 16, 1056) 26400 swish_77[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_78 (BatchNo (None, 16, 16, 1056) 4224 depthwise_conv2d_27[0][0] \n__________________________________________________________________________________________________\nswish_78 (Swish) (None, 16, 16, 1056) 0 batch_normalization_78[0][0] \n__________________________________________________________________________________________________\nlambda_27 (Lambda) (None, 1, 1, 1056) 0 swish_78[0][0] \n__________________________________________________________________________________________________\nconv2d_104 (Conv2D) (None, 1, 1, 44) 46508 lambda_27[0][0] \n__________________________________________________________________________________________________\nswish_79 (Swish) (None, 1, 1, 44) 0 conv2d_104[0][0] \n__________________________________________________________________________________________________\nconv2d_105 (Conv2D) (None, 1, 1, 1056) 47520 swish_79[0][0] \n__________________________________________________________________________________________________\nactivation_27 (Activation) (None, 1, 1, 1056) 0 conv2d_105[0][0] \n__________________________________________________________________________________________________\nmultiply_27 (Multiply) (None, 16, 16, 1056) 0 activation_27[0][0] \n swish_78[0][0] \n__________________________________________________________________________________________________\nconv2d_106 (Conv2D) (None, 16, 16, 176) 185856 multiply_27[0][0] 
\n__________________________________________________________________________________________________\nbatch_normalization_79 (BatchNo (None, 16, 16, 176) 704 conv2d_106[0][0] \n__________________________________________________________________________________________________\ndrop_connect_22 (DropConnect) (None, 16, 16, 176) 0 batch_normalization_79[0][0] \n__________________________________________________________________________________________________\nadd_22 (Add) (None, 16, 16, 176) 0 drop_connect_22[0][0] \n add_21[0][0] \n__________________________________________________________________________________________________\nconv2d_107 (Conv2D) (None, 16, 16, 1056) 185856 add_22[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_80 (BatchNo (None, 16, 16, 1056) 4224 conv2d_107[0][0] \n__________________________________________________________________________________________________\nswish_80 (Swish) (None, 16, 16, 1056) 0 batch_normalization_80[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_28 (DepthwiseC (None, 8, 8, 1056) 26400 swish_80[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_81 (BatchNo (None, 8, 8, 1056) 4224 depthwise_conv2d_28[0][0] \n__________________________________________________________________________________________________\nswish_81 (Swish) (None, 8, 8, 1056) 0 batch_normalization_81[0][0] \n__________________________________________________________________________________________________\nlambda_28 (Lambda) (None, 1, 1, 1056) 0 swish_81[0][0] \n__________________________________________________________________________________________________\nconv2d_108 (Conv2D) (None, 1, 1, 44) 46508 lambda_28[0][0] \n__________________________________________________________________________________________________\nswish_82 
(Swish) (None, 1, 1, 44) 0 conv2d_108[0][0] \n__________________________________________________________________________________________________\nconv2d_109 (Conv2D) (None, 1, 1, 1056) 47520 swish_82[0][0] \n__________________________________________________________________________________________________\nactivation_28 (Activation) (None, 1, 1, 1056) 0 conv2d_109[0][0] \n__________________________________________________________________________________________________\nmultiply_28 (Multiply) (None, 8, 8, 1056) 0 activation_28[0][0] \n swish_81[0][0] \n__________________________________________________________________________________________________\nconv2d_110 (Conv2D) (None, 8, 8, 304) 321024 multiply_28[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_82 (BatchNo (None, 8, 8, 304) 1216 conv2d_110[0][0] \n__________________________________________________________________________________________________\nconv2d_111 (Conv2D) (None, 8, 8, 1824) 554496 batch_normalization_82[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_83 (BatchNo (None, 8, 8, 1824) 7296 conv2d_111[0][0] \n__________________________________________________________________________________________________\nswish_83 (Swish) (None, 8, 8, 1824) 0 batch_normalization_83[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_29 (DepthwiseC (None, 8, 8, 1824) 45600 swish_83[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_84 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_29[0][0] \n__________________________________________________________________________________________________\nswish_84 (Swish) (None, 8, 8, 1824) 0 batch_normalization_84[0][0] 
\n__________________________________________________________________________________________________\nlambda_29 (Lambda) (None, 1, 1, 1824) 0 swish_84[0][0] \n__________________________________________________________________________________________________\nconv2d_112 (Conv2D) (None, 1, 1, 76) 138700 lambda_29[0][0] \n__________________________________________________________________________________________________\nswish_85 (Swish) (None, 1, 1, 76) 0 conv2d_112[0][0] \n__________________________________________________________________________________________________\nconv2d_113 (Conv2D) (None, 1, 1, 1824) 140448 swish_85[0][0] \n__________________________________________________________________________________________________\nactivation_29 (Activation) (None, 1, 1, 1824) 0 conv2d_113[0][0] \n__________________________________________________________________________________________________\nmultiply_29 (Multiply) (None, 8, 8, 1824) 0 activation_29[0][0] \n swish_84[0][0] \n__________________________________________________________________________________________________\nconv2d_114 (Conv2D) (None, 8, 8, 304) 554496 multiply_29[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_85 (BatchNo (None, 8, 8, 304) 1216 conv2d_114[0][0] \n__________________________________________________________________________________________________\ndrop_connect_23 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_85[0][0] \n__________________________________________________________________________________________________\nadd_23 (Add) (None, 8, 8, 304) 0 drop_connect_23[0][0] \n batch_normalization_82[0][0] \n__________________________________________________________________________________________________\nconv2d_115 (Conv2D) (None, 8, 8, 1824) 554496 add_23[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_86 (BatchNo (None, 
8, 8, 1824) 7296 conv2d_115[0][0] \n__________________________________________________________________________________________________\nswish_86 (Swish) (None, 8, 8, 1824) 0 batch_normalization_86[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_30 (DepthwiseC (None, 8, 8, 1824) 45600 swish_86[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_87 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_30[0][0] \n__________________________________________________________________________________________________\nswish_87 (Swish) (None, 8, 8, 1824) 0 batch_normalization_87[0][0] \n__________________________________________________________________________________________________\nlambda_30 (Lambda) (None, 1, 1, 1824) 0 swish_87[0][0] \n__________________________________________________________________________________________________\nconv2d_116 (Conv2D) (None, 1, 1, 76) 138700 lambda_30[0][0] \n__________________________________________________________________________________________________\nswish_88 (Swish) (None, 1, 1, 76) 0 conv2d_116[0][0] \n__________________________________________________________________________________________________\nconv2d_117 (Conv2D) (None, 1, 1, 1824) 140448 swish_88[0][0] \n__________________________________________________________________________________________________\nactivation_30 (Activation) (None, 1, 1, 1824) 0 conv2d_117[0][0] \n__________________________________________________________________________________________________\nmultiply_30 (Multiply) (None, 8, 8, 1824) 0 activation_30[0][0] \n swish_87[0][0] \n__________________________________________________________________________________________________\nconv2d_118 (Conv2D) (None, 8, 8, 304) 554496 multiply_30[0][0] 
\n__________________________________________________________________________________________________\nbatch_normalization_88 (BatchNo (None, 8, 8, 304) 1216 conv2d_118[0][0] \n__________________________________________________________________________________________________\ndrop_connect_24 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_88[0][0] \n__________________________________________________________________________________________________\nadd_24 (Add) (None, 8, 8, 304) 0 drop_connect_24[0][0] \n add_23[0][0] \n__________________________________________________________________________________________________\nconv2d_119 (Conv2D) (None, 8, 8, 1824) 554496 add_24[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_89 (BatchNo (None, 8, 8, 1824) 7296 conv2d_119[0][0] \n__________________________________________________________________________________________________\nswish_89 (Swish) (None, 8, 8, 1824) 0 batch_normalization_89[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_31 (DepthwiseC (None, 8, 8, 1824) 45600 swish_89[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_90 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_31[0][0] \n__________________________________________________________________________________________________\nswish_90 (Swish) (None, 8, 8, 1824) 0 batch_normalization_90[0][0] \n__________________________________________________________________________________________________\nlambda_31 (Lambda) (None, 1, 1, 1824) 0 swish_90[0][0] \n__________________________________________________________________________________________________\nconv2d_120 (Conv2D) (None, 1, 1, 76) 138700 lambda_31[0][0] \n__________________________________________________________________________________________________\nswish_91 (Swish) 
(None, 1, 1, 76) 0 conv2d_120[0][0] \n__________________________________________________________________________________________________\nconv2d_121 (Conv2D) (None, 1, 1, 1824) 140448 swish_91[0][0] \n__________________________________________________________________________________________________\nactivation_31 (Activation) (None, 1, 1, 1824) 0 conv2d_121[0][0] \n__________________________________________________________________________________________________\nmultiply_31 (Multiply) (None, 8, 8, 1824) 0 activation_31[0][0] \n swish_90[0][0] \n__________________________________________________________________________________________________\nconv2d_122 (Conv2D) (None, 8, 8, 304) 554496 multiply_31[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_91 (BatchNo (None, 8, 8, 304) 1216 conv2d_122[0][0] \n__________________________________________________________________________________________________\ndrop_connect_25 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_91[0][0] \n__________________________________________________________________________________________________\nadd_25 (Add) (None, 8, 8, 304) 0 drop_connect_25[0][0] \n add_24[0][0] \n__________________________________________________________________________________________________\nconv2d_123 (Conv2D) (None, 8, 8, 1824) 554496 add_25[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_92 (BatchNo (None, 8, 8, 1824) 7296 conv2d_123[0][0] \n__________________________________________________________________________________________________\nswish_92 (Swish) (None, 8, 8, 1824) 0 batch_normalization_92[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_32 (DepthwiseC (None, 8, 8, 1824) 45600 swish_92[0][0] 
\n__________________________________________________________________________________________________\nbatch_normalization_93 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_32[0][0] \n__________________________________________________________________________________________________\nswish_93 (Swish) (None, 8, 8, 1824) 0 batch_normalization_93[0][0] \n__________________________________________________________________________________________________\nlambda_32 (Lambda) (None, 1, 1, 1824) 0 swish_93[0][0] \n__________________________________________________________________________________________________\nconv2d_124 (Conv2D) (None, 1, 1, 76) 138700 lambda_32[0][0] \n__________________________________________________________________________________________________\nswish_94 (Swish) (None, 1, 1, 76) 0 conv2d_124[0][0] \n__________________________________________________________________________________________________\nconv2d_125 (Conv2D) (None, 1, 1, 1824) 140448 swish_94[0][0] \n__________________________________________________________________________________________________\nactivation_32 (Activation) (None, 1, 1, 1824) 0 conv2d_125[0][0] \n__________________________________________________________________________________________________\nmultiply_32 (Multiply) (None, 8, 8, 1824) 0 activation_32[0][0] \n swish_93[0][0] \n__________________________________________________________________________________________________\nconv2d_126 (Conv2D) (None, 8, 8, 304) 554496 multiply_32[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_94 (BatchNo (None, 8, 8, 304) 1216 conv2d_126[0][0] \n__________________________________________________________________________________________________\ndrop_connect_26 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_94[0][0] \n__________________________________________________________________________________________________\nadd_26 (Add) (None, 8, 8, 304) 0 
drop_connect_26[0][0] \n add_25[0][0] \n__________________________________________________________________________________________________\nconv2d_127 (Conv2D) (None, 8, 8, 1824) 554496 add_26[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_95 (BatchNo (None, 8, 8, 1824) 7296 conv2d_127[0][0] \n__________________________________________________________________________________________________\nswish_95 (Swish) (None, 8, 8, 1824) 0 batch_normalization_95[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_33 (DepthwiseC (None, 8, 8, 1824) 45600 swish_95[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_96 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_33[0][0] \n__________________________________________________________________________________________________\nswish_96 (Swish) (None, 8, 8, 1824) 0 batch_normalization_96[0][0] \n__________________________________________________________________________________________________\nlambda_33 (Lambda) (None, 1, 1, 1824) 0 swish_96[0][0] \n__________________________________________________________________________________________________\nconv2d_128 (Conv2D) (None, 1, 1, 76) 138700 lambda_33[0][0] \n__________________________________________________________________________________________________\nswish_97 (Swish) (None, 1, 1, 76) 0 conv2d_128[0][0] \n__________________________________________________________________________________________________\nconv2d_129 (Conv2D) (None, 1, 1, 1824) 140448 swish_97[0][0] \n__________________________________________________________________________________________________\nactivation_33 (Activation) (None, 1, 1, 1824) 0 conv2d_129[0][0] \n__________________________________________________________________________________________________\nmultiply_33 (Multiply) 
(None, 8, 8, 1824) 0 activation_33[0][0] \n swish_96[0][0] \n__________________________________________________________________________________________________\nconv2d_130 (Conv2D) (None, 8, 8, 304) 554496 multiply_33[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_97 (BatchNo (None, 8, 8, 304) 1216 conv2d_130[0][0] \n__________________________________________________________________________________________________\ndrop_connect_27 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_97[0][0] \n__________________________________________________________________________________________________\nadd_27 (Add) (None, 8, 8, 304) 0 drop_connect_27[0][0] \n add_26[0][0] \n__________________________________________________________________________________________________\nconv2d_131 (Conv2D) (None, 8, 8, 1824) 554496 add_27[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_98 (BatchNo (None, 8, 8, 1824) 7296 conv2d_131[0][0] \n__________________________________________________________________________________________________\nswish_98 (Swish) (None, 8, 8, 1824) 0 batch_normalization_98[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_34 (DepthwiseC (None, 8, 8, 1824) 45600 swish_98[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_99 (BatchNo (None, 8, 8, 1824) 7296 depthwise_conv2d_34[0][0] \n__________________________________________________________________________________________________\nswish_99 (Swish) (None, 8, 8, 1824) 0 batch_normalization_99[0][0] \n__________________________________________________________________________________________________\nlambda_34 (Lambda) (None, 1, 1, 1824) 0 swish_99[0][0] 
\n__________________________________________________________________________________________________\nconv2d_132 (Conv2D) (None, 1, 1, 76) 138700 lambda_34[0][0] \n__________________________________________________________________________________________________\nswish_100 (Swish) (None, 1, 1, 76) 0 conv2d_132[0][0] \n__________________________________________________________________________________________________\nconv2d_133 (Conv2D) (None, 1, 1, 1824) 140448 swish_100[0][0] \n__________________________________________________________________________________________________\nactivation_34 (Activation) (None, 1, 1, 1824) 0 conv2d_133[0][0] \n__________________________________________________________________________________________________\nmultiply_34 (Multiply) (None, 8, 8, 1824) 0 activation_34[0][0] \n swish_99[0][0] \n__________________________________________________________________________________________________\nconv2d_134 (Conv2D) (None, 8, 8, 304) 554496 multiply_34[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_100 (BatchN (None, 8, 8, 304) 1216 conv2d_134[0][0] \n__________________________________________________________________________________________________\ndrop_connect_28 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_100[0][0] \n__________________________________________________________________________________________________\nadd_28 (Add) (None, 8, 8, 304) 0 drop_connect_28[0][0] \n add_27[0][0] \n__________________________________________________________________________________________________\nconv2d_135 (Conv2D) (None, 8, 8, 1824) 554496 add_28[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_101 (BatchN (None, 8, 8, 1824) 7296 conv2d_135[0][0] \n__________________________________________________________________________________________________\nswish_101 (Swish) (None, 8, 8, 
1824) 0 batch_normalization_101[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_35 (DepthwiseC (None, 8, 8, 1824) 45600 swish_101[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_102 (BatchN (None, 8, 8, 1824) 7296 depthwise_conv2d_35[0][0] \n__________________________________________________________________________________________________\nswish_102 (Swish) (None, 8, 8, 1824) 0 batch_normalization_102[0][0] \n__________________________________________________________________________________________________\nlambda_35 (Lambda) (None, 1, 1, 1824) 0 swish_102[0][0] \n__________________________________________________________________________________________________\nconv2d_136 (Conv2D) (None, 1, 1, 76) 138700 lambda_35[0][0] \n__________________________________________________________________________________________________\nswish_103 (Swish) (None, 1, 1, 76) 0 conv2d_136[0][0] \n__________________________________________________________________________________________________\nconv2d_137 (Conv2D) (None, 1, 1, 1824) 140448 swish_103[0][0] \n__________________________________________________________________________________________________\nactivation_35 (Activation) (None, 1, 1, 1824) 0 conv2d_137[0][0] \n__________________________________________________________________________________________________\nmultiply_35 (Multiply) (None, 8, 8, 1824) 0 activation_35[0][0] \n swish_102[0][0] \n__________________________________________________________________________________________________\nconv2d_138 (Conv2D) (None, 8, 8, 304) 554496 multiply_35[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_103 (BatchN (None, 8, 8, 304) 1216 conv2d_138[0][0] 
\n__________________________________________________________________________________________________\ndrop_connect_29 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_103[0][0] \n__________________________________________________________________________________________________\nadd_29 (Add) (None, 8, 8, 304) 0 drop_connect_29[0][0] \n add_28[0][0] \n__________________________________________________________________________________________________\nconv2d_139 (Conv2D) (None, 8, 8, 1824) 554496 add_29[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_104 (BatchN (None, 8, 8, 1824) 7296 conv2d_139[0][0] \n__________________________________________________________________________________________________\nswish_104 (Swish) (None, 8, 8, 1824) 0 batch_normalization_104[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_36 (DepthwiseC (None, 8, 8, 1824) 45600 swish_104[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_105 (BatchN (None, 8, 8, 1824) 7296 depthwise_conv2d_36[0][0] \n__________________________________________________________________________________________________\nswish_105 (Swish) (None, 8, 8, 1824) 0 batch_normalization_105[0][0] \n__________________________________________________________________________________________________\nlambda_36 (Lambda) (None, 1, 1, 1824) 0 swish_105[0][0] \n__________________________________________________________________________________________________\nconv2d_140 (Conv2D) (None, 1, 1, 76) 138700 lambda_36[0][0] \n__________________________________________________________________________________________________\nswish_106 (Swish) (None, 1, 1, 76) 0 conv2d_140[0][0] \n__________________________________________________________________________________________________\nconv2d_141 (Conv2D) (None, 1, 
1, 1824) 140448 swish_106[0][0] \n__________________________________________________________________________________________________\nactivation_36 (Activation) (None, 1, 1, 1824) 0 conv2d_141[0][0] \n__________________________________________________________________________________________________\nmultiply_36 (Multiply) (None, 8, 8, 1824) 0 activation_36[0][0] \n swish_105[0][0] \n__________________________________________________________________________________________________\nconv2d_142 (Conv2D) (None, 8, 8, 304) 554496 multiply_36[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_106 (BatchN (None, 8, 8, 304) 1216 conv2d_142[0][0] \n__________________________________________________________________________________________________\ndrop_connect_30 (DropConnect) (None, 8, 8, 304) 0 batch_normalization_106[0][0] \n__________________________________________________________________________________________________\nadd_30 (Add) (None, 8, 8, 304) 0 drop_connect_30[0][0] \n add_29[0][0] \n__________________________________________________________________________________________________\nconv2d_143 (Conv2D) (None, 8, 8, 1824) 554496 add_30[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_107 (BatchN (None, 8, 8, 1824) 7296 conv2d_143[0][0] \n__________________________________________________________________________________________________\nswish_107 (Swish) (None, 8, 8, 1824) 0 batch_normalization_107[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_37 (DepthwiseC (None, 8, 8, 1824) 16416 swish_107[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_108 (BatchN (None, 8, 8, 1824) 7296 depthwise_conv2d_37[0][0] 
\n__________________________________________________________________________________________________\nswish_108 (Swish) (None, 8, 8, 1824) 0 batch_normalization_108[0][0] \n__________________________________________________________________________________________________\nlambda_37 (Lambda) (None, 1, 1, 1824) 0 swish_108[0][0] \n__________________________________________________________________________________________________\nconv2d_144 (Conv2D) (None, 1, 1, 76) 138700 lambda_37[0][0] \n__________________________________________________________________________________________________\nswish_109 (Swish) (None, 1, 1, 76) 0 conv2d_144[0][0] \n__________________________________________________________________________________________________\nconv2d_145 (Conv2D) (None, 1, 1, 1824) 140448 swish_109[0][0] \n__________________________________________________________________________________________________\nactivation_37 (Activation) (None, 1, 1, 1824) 0 conv2d_145[0][0] \n__________________________________________________________________________________________________\nmultiply_37 (Multiply) (None, 8, 8, 1824) 0 activation_37[0][0] \n swish_108[0][0] \n__________________________________________________________________________________________________\nconv2d_146 (Conv2D) (None, 8, 8, 512) 933888 multiply_37[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_109 (BatchN (None, 8, 8, 512) 2048 conv2d_146[0][0] \n__________________________________________________________________________________________________\nconv2d_147 (Conv2D) (None, 8, 8, 3072) 1572864 batch_normalization_109[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_110 (BatchN (None, 8, 8, 3072) 12288 conv2d_147[0][0] \n__________________________________________________________________________________________________\nswish_110 (Swish) (None, 8, 8, 3072) 0 
batch_normalization_110[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_38 (DepthwiseC (None, 8, 8, 3072) 27648 swish_110[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_111 (BatchN (None, 8, 8, 3072) 12288 depthwise_conv2d_38[0][0] \n__________________________________________________________________________________________________\nswish_111 (Swish) (None, 8, 8, 3072) 0 batch_normalization_111[0][0] \n__________________________________________________________________________________________________\nlambda_38 (Lambda) (None, 1, 1, 3072) 0 swish_111[0][0] \n__________________________________________________________________________________________________\nconv2d_148 (Conv2D) (None, 1, 1, 128) 393344 lambda_38[0][0] \n__________________________________________________________________________________________________\nswish_112 (Swish) (None, 1, 1, 128) 0 conv2d_148[0][0] \n__________________________________________________________________________________________________\nconv2d_149 (Conv2D) (None, 1, 1, 3072) 396288 swish_112[0][0] \n__________________________________________________________________________________________________\nactivation_38 (Activation) (None, 1, 1, 3072) 0 conv2d_149[0][0] \n__________________________________________________________________________________________________\nmultiply_38 (Multiply) (None, 8, 8, 3072) 0 activation_38[0][0] \n swish_111[0][0] \n__________________________________________________________________________________________________\nconv2d_150 (Conv2D) (None, 8, 8, 512) 1572864 multiply_38[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_112 (BatchN (None, 8, 8, 512) 2048 conv2d_150[0][0] 
\n__________________________________________________________________________________________________\ndrop_connect_31 (DropConnect) (None, 8, 8, 512) 0 batch_normalization_112[0][0] \n__________________________________________________________________________________________________\nadd_31 (Add) (None, 8, 8, 512) 0 drop_connect_31[0][0] \n batch_normalization_109[0][0] \n__________________________________________________________________________________________________\nconv2d_151 (Conv2D) (None, 8, 8, 3072) 1572864 add_31[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_113 (BatchN (None, 8, 8, 3072) 12288 conv2d_151[0][0] \n__________________________________________________________________________________________________\nswish_113 (Swish) (None, 8, 8, 3072) 0 batch_normalization_113[0][0] \n__________________________________________________________________________________________________\ndepthwise_conv2d_39 (DepthwiseC (None, 8, 8, 3072) 27648 swish_113[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_114 (BatchN (None, 8, 8, 3072) 12288 depthwise_conv2d_39[0][0] \n__________________________________________________________________________________________________\nswish_114 (Swish) (None, 8, 8, 3072) 0 batch_normalization_114[0][0] \n__________________________________________________________________________________________________\nlambda_39 (Lambda) (None, 1, 1, 3072) 0 swish_114[0][0] \n__________________________________________________________________________________________________\nconv2d_152 (Conv2D) (None, 1, 1, 128) 393344 lambda_39[0][0] \n__________________________________________________________________________________________________\nswish_115 (Swish) (None, 1, 1, 128) 0 conv2d_152[0][0] 
\n__________________________________________________________________________________________________\nconv2d_153 (Conv2D) (None, 1, 1, 3072) 396288 swish_115[0][0] \n__________________________________________________________________________________________________\nactivation_39 (Activation) (None, 1, 1, 3072) 0 conv2d_153[0][0] \n__________________________________________________________________________________________________\nmultiply_39 (Multiply) (None, 8, 8, 3072) 0 activation_39[0][0] \n swish_114[0][0] \n__________________________________________________________________________________________________\nconv2d_154 (Conv2D) (None, 8, 8, 512) 1572864 multiply_39[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_115 (BatchN (None, 8, 8, 512) 2048 conv2d_154[0][0] \n__________________________________________________________________________________________________\ndrop_connect_32 (DropConnect) (None, 8, 8, 512) 0 batch_normalization_115[0][0] \n__________________________________________________________________________________________________\nadd_32 (Add) (None, 8, 8, 512) 0 drop_connect_32[0][0] \n add_31[0][0] \n__________________________________________________________________________________________________\nconv2d_155 (Conv2D) (None, 8, 8, 2048) 1048576 add_32[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_116 (BatchN (None, 8, 8, 2048) 8192 conv2d_155[0][0] \n__________________________________________________________________________________________________\nswish_116 (Swish) (None, 8, 8, 2048) 0 batch_normalization_116[0][0] \n__________________________________________________________________________________________________\nglobal_average_pooling2d_1 (Glo (None, 2048) 0 swish_116[0][0] \n__________________________________________________________________________________________________\nfinal_output 
(Dense) (None, 1) 2049 global_average_pooling2d_1[0][0] \n==================================================================================================\nTotal params: 28,515,569\nTrainable params: 28,342,833\nNon-trainable params: 172,736\n__________________________________________________________________________________________________\n" ], [ "history = model.fit_generator(generator=train_generator,\n steps_per_epoch=STEP_SIZE_TRAIN,\n validation_data=valid_generator,\n validation_steps=STEP_SIZE_VALID,\n epochs=EPOCHS,\n callbacks=callback_list,\n verbose=2).history", "Epoch 1/10\n - 172s - loss: 0.4699 - acc: 0.6831 - val_loss: 0.4170 - val_acc: 0.7615\nEpoch 2/10\n - 119s - loss: 0.3443 - acc: 0.7312 - val_loss: 0.3200 - val_acc: 0.7852\nEpoch 3/10\n - 119s - loss: 0.3618 - acc: 0.7391 - val_loss: 0.3073 - val_acc: 0.7392\nEpoch 4/10\n - 118s - loss: 0.2940 - acc: 0.7572 - val_loss: 0.3226 - val_acc: 0.7964\nEpoch 5/10\n - 118s - loss: 0.2552 - acc: 0.7828 - val_loss: 0.3066 - val_acc: 0.7462\nEpoch 6/10\n - 118s - loss: 0.2261 - acc: 0.7924 - val_loss: 0.3077 - val_acc: 0.7922\nEpoch 7/10\n - 118s - loss: 0.2125 - acc: 0.8105 - val_loss: 0.2614 - val_acc: 0.8215\nEpoch 8/10\n - 118s - loss: 0.1792 - acc: 0.8303 - val_loss: 0.2636 - val_acc: 0.7936\nEpoch 9/10\n - 118s - loss: 0.1606 - acc: 0.8490 - val_loss: 0.2898 - val_acc: 0.8006\nEpoch 10/10\n - 118s - loss: 0.1440 - acc: 0.8610 - val_loss: 0.2561 - val_acc: 0.8173\n" ], [ "fig, (ax1, ax2) = plt.subplots(2, 1, sharex='col', figsize=(20, 6))\n\nax1.plot(cosine_lr_1st.learning_rates)\nax1.set_title('Warm up learning rates')\n\nax2.plot(cosine_lr_2nd.learning_rates)\nax2.set_title('Fine-tune learning rates')\n\nplt.xlabel('Steps')\nplt.ylabel('Learning rate')\nsns.despine()\nplt.show()", "_____no_output_____" ] ], [ [ "# Model loss graph ", "_____no_output_____" ] ], [ [ "fig, (ax1, ax2) = plt.subplots(2, 1, sharex='col', figsize=(20, 14))\n\nax1.plot(history['loss'], label='Train 
loss')\nax1.plot(history['val_loss'], label='Validation loss')\nax1.legend(loc='best')\nax1.set_title('Loss')\n\nax2.plot(history['acc'], label='Train accuracy')\nax2.plot(history['val_acc'], label='Validation accuracy')\nax2.legend(loc='best')\nax2.set_title('Accuracy')\n\nplt.xlabel('Epochs')\nsns.despine()\nplt.show()", "_____no_output_____" ], [ "# Create empty arays to keep the predictions and labels\ndf_preds = pd.DataFrame(columns=['label', 'pred', 'set'])\ntrain_generator.reset()\nvalid_generator.reset()\n\n# Add train predictions and labels\nfor i in range(STEP_SIZE_TRAIN + 1):\n im, lbl = next(train_generator)\n preds = model.predict(im, batch_size=train_generator.batch_size)\n for index in range(len(preds)):\n df_preds.loc[len(df_preds)] = [lbl[index], preds[index][0], 'train']\n\n# Add validation predictions and labels\nfor i in range(STEP_SIZE_VALID + 1):\n im, lbl = next(valid_generator)\n preds = model.predict(im, batch_size=valid_generator.batch_size)\n for index in range(len(preds)):\n df_preds.loc[len(df_preds)] = [lbl[index], preds[index][0], 'validation']\n\ndf_preds['label'] = df_preds['label'].astype('int')", "_____no_output_____" ], [ "def classify(x):\n if x < 0.5:\n return 0\n elif x < 1.5:\n return 1\n elif x < 2.5:\n return 2\n elif x < 3.5:\n return 3\n return 4\n\n# Classify predictions\ndf_preds['predictions'] = df_preds['pred'].apply(lambda x: classify(x))\n\ntrain_preds = df_preds[df_preds['set'] == 'train']\nvalidation_preds = df_preds[df_preds['set'] == 'validation']", "_____no_output_____" ] ], [ [ "# Model Evaluation", "_____no_output_____" ], [ "## Confusion Matrix\n\n### Original thresholds", "_____no_output_____" ] ], [ [ "labels = ['0 - No DR', '1 - Mild', '2 - Moderate', '3 - Severe', '4 - Proliferative DR']\ndef plot_confusion_matrix(train, validation, labels=labels):\n train_labels, train_preds = train\n validation_labels, validation_preds = validation\n fig, (ax1, ax2) = plt.subplots(1, 2, sharex='col', figsize=(24, 7))\n 
train_cnf_matrix = confusion_matrix(train_labels, train_preds)\n validation_cnf_matrix = confusion_matrix(validation_labels, validation_preds)\n\n train_cnf_matrix_norm = train_cnf_matrix.astype('float') / train_cnf_matrix.sum(axis=1)[:, np.newaxis]\n validation_cnf_matrix_norm = validation_cnf_matrix.astype('float') / validation_cnf_matrix.sum(axis=1)[:, np.newaxis]\n\n train_df_cm = pd.DataFrame(train_cnf_matrix_norm, index=labels, columns=labels)\n validation_df_cm = pd.DataFrame(validation_cnf_matrix_norm, index=labels, columns=labels)\n\n sns.heatmap(train_df_cm, annot=True, fmt='.2f', cmap=\"Blues\",ax=ax1).set_title('Train')\n sns.heatmap(validation_df_cm, annot=True, fmt='.2f', cmap=sns.cubehelix_palette(8),ax=ax2).set_title('Validation')\n plt.show()\n\nplot_confusion_matrix((train_preds['label'], train_preds['predictions']), (validation_preds['label'], validation_preds['predictions']))", "_____no_output_____" ] ], [ [ "## Quadratic Weighted Kappa", "_____no_output_____" ] ], [ [ "def evaluate_model(train, validation):\n train_labels, train_preds = train\n validation_labels, validation_preds = validation\n print(\"Train Cohen Kappa score: %.3f\" % cohen_kappa_score(train_preds, train_labels, weights='quadratic'))\n print(\"Validation Cohen Kappa score: %.3f\" % cohen_kappa_score(validation_preds, validation_labels, weights='quadratic'))\n print(\"Complete set Cohen Kappa score: %.3f\" % cohen_kappa_score(np.append(train_preds, validation_preds), np.append(train_labels, validation_labels), weights='quadratic'))\n \nevaluate_model((train_preds['label'], train_preds['predictions']), (validation_preds['label'], validation_preds['predictions']))", "Train Cohen Kappa score: 0.964\nValidation Cohen Kappa score: 0.903\nComplete set Cohen Kappa score: 0.952\n" ] ], [ [ "## Apply model to test set and output predictions", "_____no_output_____" ] ], [ [ "def apply_tta(model, generator, steps=10):\n step_size = generator.n//generator.batch_size\n preds_tta = []\n for 
i in range(steps):\n generator.reset()\n preds = model.predict_generator(generator, steps=step_size)\n preds_tta.append(preds)\n\n return np.mean(preds_tta, axis=0)\n\npreds = apply_tta(model, test_generator, TTA_STEPS)\npredictions = [classify(x) for x in preds]\n\nresults = pd.DataFrame({'id_code':test['id_code'], 'diagnosis':predictions})\nresults['id_code'] = results['id_code'].map(lambda x: str(x)[:-4])", "_____no_output_____" ], [ "# Cleaning created directories\nif os.path.exists(train_dest_path):\n shutil.rmtree(train_dest_path)\nif os.path.exists(validation_dest_path):\n shutil.rmtree(validation_dest_path)\nif os.path.exists(test_dest_path):\n shutil.rmtree(test_dest_path)", "_____no_output_____" ] ], [ [ "# Predictions class distribution", "_____no_output_____" ] ], [ [ "fig = plt.subplots(sharex='col', figsize=(24, 8.7))\nsns.countplot(x=\"diagnosis\", data=results, palette=\"GnBu_d\").set_title('Test')\nsns.despine()\nplt.show()", "_____no_output_____" ], [ "results.to_csv('submission.csv', index=False)\ndisplay(results.head())", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa32ec0504e4c55a58eab3bb3db54e2f36495d4
277,348
ipynb
Jupyter Notebook
1_mosaic_data_attention_experiments/3_stage_wise_training/alternate_minimization/theory/type0_post_mortem/codes/linear_linear_init0_sgdmomentum_simultaneous.ipynb
lnpandey/DL_explore_synth_data
0a5d8b417091897f4c7f358377d5198a155f3f24
[ "MIT" ]
2
2019-08-24T07:20:35.000Z
2020-03-27T08:16:59.000Z
1_mosaic_data_attention_experiments/3_stage_wise_training/alternate_minimization/theory/type0_post_mortem/codes/linear_linear_init0_sgdmomentum_simultaneous.ipynb
lnpandey/DL_explore_synth_data
0a5d8b417091897f4c7f358377d5198a155f3f24
[ "MIT" ]
null
null
null
1_mosaic_data_attention_experiments/3_stage_wise_training/alternate_minimization/theory/type0_post_mortem/codes/linear_linear_init0_sgdmomentum_simultaneous.ipynb
lnpandey/DL_explore_synth_data
0a5d8b417091897f4c7f358377d5198a155f3f24
[ "MIT" ]
3
2019-06-21T09:34:32.000Z
2019-09-19T10:43:07.000Z
83.48826
36,982
0.7127
[ [ [ "import numpy as np\nimport pandas as pd\nfrom matplotlib import pyplot as plt\nfrom tqdm import tqdm\n%matplotlib inline\nfrom torch.utils.data import Dataset, DataLoader\nimport torch\nimport torchvision\n\nimport torch.nn as nn\nimport torch.optim as optim\nfrom torch.nn import functional as F\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nprint(device)", "cuda\n" ] ], [ [ "# Generate dataset", "_____no_output_____" ] ], [ [ "y = np.random.randint(0,3,500)\nidx= []\nfor i in range(3):\n print(i,sum(y==i))\n idx.append(y==i)", "0 160\n1 165\n2 175\n" ], [ "x = np.zeros((500,))", "_____no_output_____" ], [ "np.random.seed(12)\nx[idx[0]] = np.random.uniform(low =-1,high =0,size= sum(idx[0]))\nx[idx[1]] = np.random.uniform(low =0,high =1,size= sum(idx[1]))\nx[idx[2]] = np.random.uniform(low =2,high =3,size= sum(idx[2]))", "_____no_output_____" ], [ "x[idx[0]][0], x[idx[2]][5] ", "_____no_output_____" ], [ "print(x.shape,y.shape)", "(500,) (500,)\n" ], [ "idx= []\nfor i in range(3):\n idx.append(y==i)", "_____no_output_____" ], [ "for i in range(3):\n y= np.zeros(x[idx[i]].shape[0])\n plt.scatter(x[idx[i]],y,label=\"class_\"+str(i))\nplt.legend()", "_____no_output_____" ], [ "bg_idx = [ np.where(idx[2] == True)[0]]\n\nbg_idx = np.concatenate(bg_idx, axis = 0)\nbg_idx.shape", "_____no_output_____" ], [ "np.unique(bg_idx).shape", "_____no_output_____" ], [ "x = x - np.mean(x[bg_idx], axis = 0, keepdims = True)\n", "_____no_output_____" ], [ "np.mean(x[bg_idx], axis = 0, keepdims = True), np.mean(x, axis = 0, keepdims = True)", "_____no_output_____" ], [ "x = x/np.std(x[bg_idx], axis = 0, keepdims = True)", "_____no_output_____" ], [ "np.std(x[bg_idx], axis = 0, keepdims = True), np.std(x, axis = 0, keepdims = True)", "_____no_output_____" ], [ "for i in range(3):\n y= np.zeros(x[idx[i]].shape[0])\n plt.scatter(x[idx[i]],y,label=\"class_\"+str(i))\nplt.legend()", "_____no_output_____" ], [ "foreground_classes = {'class_0','class_1' 
}\n\nbackground_classes = {'class_2'}", "_____no_output_____" ], [ "fg_class = np.random.randint(0,2)\nfg_idx = np.random.randint(0,9)\n\na = []\nfor i in range(9):\n if i == fg_idx:\n b = np.random.choice(np.where(idx[fg_class]==True)[0],size=1)\n a.append(x[b])\n print(\"foreground \"+str(fg_class)+\" present at \" + str(fg_idx))\n else:\n bg_class = np.random.randint(2,3)\n b = np.random.choice(np.where(idx[bg_class]==True)[0],size=1)\n a.append(x[b])\n print(\"background \"+str(bg_class)+\" present at \" + str(i))\na = np.concatenate(a,axis=0)\nprint(a.shape)\n\nprint(fg_class , fg_idx)", "background 2 present at 0\nbackground 2 present at 1\nbackground 2 present at 2\nbackground 2 present at 3\nbackground 2 present at 4\nbackground 2 present at 5\nbackground 2 present at 6\nbackground 2 present at 7\nforeground 1 present at 8\n(9,)\n1 8\n" ], [ "a.shape", "_____no_output_____" ], [ "np.reshape(a,(9,1))", "_____no_output_____" ], [ "a=np.reshape(a,(3,3))", "_____no_output_____" ], [ "plt.imshow(a)", "_____no_output_____" ], [ "desired_num = 2000\nmosaic_list_of_images =[]\nmosaic_label = []\nfore_idx=[]\nfor j in range(desired_num):\n np.random.seed(j)\n fg_class = np.random.randint(0,2)\n fg_idx = 0\n a = []\n for i in range(9):\n if i == fg_idx:\n b = np.random.choice(np.where(idx[fg_class]==True)[0],size=1)\n a.append(x[b])\n# print(\"foreground \"+str(fg_class)+\" present at \" + str(fg_idx))\n else:\n bg_class = np.random.randint(2,3)\n b = np.random.choice(np.where(idx[bg_class]==True)[0],size=1)\n a.append(x[b])\n# print(\"background \"+str(bg_class)+\" present at \" + str(i))\n a = np.concatenate(a,axis=0)\n mosaic_list_of_images.append(np.reshape(a,(9,1)))\n mosaic_label.append(fg_class)\n fore_idx.append(fg_idx)", "_____no_output_____" ], [ "mosaic_list_of_images = np.concatenate(mosaic_list_of_images,axis=1).T\n", "_____no_output_____" ], [ "mosaic_list_of_images.shape, mosaic_list_of_images[0]", "_____no_output_____" ], [ "for j in range(9):\n 
print(mosaic_list_of_images[0][j])\n ", "-10.468070470054734\n1.0565004769628443\n1.278100178112481\n-0.017251837431010407\n1.598543342240201\n-0.8863798117059832\n-0.45938889515789527\n-1.531628222899254\n1.3226618426278711\n" ], [ "class MosaicDataset(Dataset):\n \"\"\"MosaicDataset dataset.\"\"\"\n\n def __init__(self, mosaic_list_of_images, mosaic_label, fore_idx):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.mosaic = mosaic_list_of_images\n self.label = mosaic_label\n self.fore_idx = fore_idx\n\n def __len__(self):\n return len(self.label)\n\n def __getitem__(self, idx):\n return self.mosaic[idx] , self.label[idx], self.fore_idx[idx]\n\n", "_____no_output_____" ], [ "batch = 250\nmsd1 = MosaicDataset(mosaic_list_of_images[0:1000], mosaic_label[0:1000] , fore_idx[0:1000])\ntrain_loader = DataLoader( msd1 ,batch_size= batch ,shuffle=True)", "_____no_output_____" ], [ "batch = 250\nmsd2 = MosaicDataset(mosaic_list_of_images[1000:2000], mosaic_label[1000:2000] , fore_idx[1000:2000])\ntest_loader = DataLoader( msd2 ,batch_size= batch ,shuffle=True)", "_____no_output_____" ], [ "class Focus(nn.Module):\n def __init__(self):\n super(Focus, self).__init__()\n\n self.fc1 = nn.Linear(1, 1)\n # self.fc2 = nn.Linear(2, 1)\n\n def forward(self,z): #y is avg image #z batch of list of 9 images\n y = torch.zeros([batch], dtype=torch.float64)\n x = torch.zeros([batch,9],dtype=torch.float64)\n y = y.to(\"cuda\")\n x = x.to(\"cuda\")\n # print(x.shape, z.shape)\n for i in range(9):\n # print(z[:,i].shape)\n # print(self.helper(z[:,i])[:,0].shape)\n x[:,i] = self.helper(z[:,i])[:,0]\n # print(x.shape, z.shape)\n x = F.softmax(x,dim=1)\n # print(x.shape, z.shape)\n # x1 = x[:,0]\n # print(torch.mul(x[:,0],z[:,0]).shape)\n\n for i in range(9): \n # x1 = x[:,i] \n y = y + torch.mul(x[:,i],z[:,i])\n\n # 
print(x.shape, y.shape)\n return x, y\n \n def helper(self, x):\n x = x.view(-1, 1)\n # x = F.relu(self.fc1(x))\n x = (self.fc1(x))\n\n return x", "_____no_output_____" ], [ "class Classification(nn.Module):\n def __init__(self):\n super(Classification, self).__init__()\n self.fc1 = nn.Linear(1, 2)\n\n\n def forward(self, x):\n x = x.view(-1, 1)\n x = self.fc1(x)\n # print(x.shape)\n return x", "_____no_output_____" ], [ "torch.manual_seed(12)\nfocus_net = Focus().double()\nfocus_net = focus_net.to(\"cuda\")", "_____no_output_____" ], [ "torch.manual_seed(12)\nclassify = Classification().double()\nclassify = classify.to(\"cuda\")", "_____no_output_____" ], [ "focus_net.fc1.weight, focus_net.fc1.bias", "_____no_output_____" ], [ "classify.fc1.weight, classify.fc1.bias", "_____no_output_____" ], [ "focus_net.fc1.weight = torch.nn.Parameter(torch.tensor(np.array([[0.0]])))\nfocus_net.fc1.bias = torch.nn.Parameter(torch.tensor(np.array([0.0])))\nfocus_net.fc1.weight, focus_net.fc1.bias", "_____no_output_____" ], [ "classify.fc1.weight = torch.nn.Parameter(torch.tensor(np.array([[0.0],[0.0]])))\nclassify.fc1.bias = torch.nn.Parameter(torch.tensor(np.array([0.0, 0.0])))\nclassify.fc1.weight, classify.fc1.bias", "_____no_output_____" ], [ "focus_net = focus_net.to(\"cuda\")\nclassify = classify.to(\"cuda\")", "_____no_output_____" ], [ "focus_net.fc1.weight, focus_net.fc1.bias", "_____no_output_____" ], [ "classify.fc1.weight, classify.fc1.bias", "_____no_output_____" ], [ "import torch.optim as optim\ncriterion = nn.CrossEntropyLoss()\noptimizer_classify = optim.SGD(classify.parameters(), lr=0.01, momentum=0.9)\noptimizer_focus = optim.SGD(focus_net.parameters(), lr=0.01, momentum=0.9)\n\n# optimizer_classify = optim.Adam(classify.parameters(), lr=0.01)\n# optimizer_focus = optim.Adam(focus_net.parameters(), lr=0.01)", "_____no_output_____" ], [ "col1=[]\ncol2=[]\ncol3=[]\ncol4=[]\ncol5=[]\ncol6=[]\ncol7=[]\ncol8=[]\ncol9=[]\ncol10=[]\ncol11=[]\ncol12=[]\ncol13=[]", 
"_____no_output_____" ], [ "correct = 0\ntotal = 0\ncount = 0\nflag = 1\nfocus_true_pred_true =0\nfocus_false_pred_true =0\nfocus_true_pred_false =0\nfocus_false_pred_false =0\n\nargmax_more_than_half = 0\nargmax_less_than_half =0\n\nwith torch.no_grad():\n for data in train_loader:\n inputs, labels , fore_idx = data\n inputs = inputs.double()\n inputs, labels , fore_idx = inputs.to(\"cuda\"),labels.to(\"cuda\"), fore_idx.to(\"cuda\")\n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n # print(outputs.shape)\n _, predicted = torch.max(outputs.data, 1)\n # print(predicted.shape)\n\n for j in range(labels.size(0)):\n count += 1\n focus = torch.argmax(alphas[j])\n if alphas[j][focus] >= 0.5 :\n argmax_more_than_half += 1\n else:\n argmax_less_than_half += 1\n\n # print(focus, fore_idx[j], predicted[j])\n if(focus == fore_idx[j] and predicted[j] == labels[j]):\n focus_true_pred_true += 1\n elif(focus != fore_idx[j] and predicted[j] == labels[j]):\n focus_false_pred_true += 1\n elif(focus == fore_idx[j] and predicted[j] != labels[j]):\n focus_true_pred_false += 1\n elif(focus != fore_idx[j] and predicted[j] != labels[j]):\n focus_false_pred_false += 1\n\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\nprint('Accuracy of the network on the 1000 train images: %d %%' % ( 100 * correct / total))\nprint(\"total correct\", correct)\nprint(\"total train set images\", total)\n\nprint(\"focus_true_pred_true %d =============> FTPT : %d %%\" % (focus_true_pred_true , (100 * focus_true_pred_true / total) ) )\nprint(\"focus_false_pred_true %d =============> FFPT : %d %%\" % (focus_false_pred_true, (100 * focus_false_pred_true / total) ) )\nprint(\"focus_true_pred_false %d =============> FTPF : %d %%\" %( focus_true_pred_false , ( 100 * focus_true_pred_false / total) ) )\nprint(\"focus_false_pred_false %d =============> FFPF : %d %%\" % (focus_false_pred_false, ( 100 * focus_false_pred_false / total) ) 
)\n\nprint(\"argmax_more_than_half ==================> \",argmax_more_than_half)\nprint(\"argmax_less_than_half ==================> \",argmax_less_than_half)\nprint(count)\n\nprint(\"=\"*100)\n\ncol1.append(0)\ncol2.append(argmax_more_than_half)\ncol3.append(argmax_less_than_half)\ncol4.append(focus_true_pred_true)\ncol5.append(focus_false_pred_true)\ncol6.append(focus_true_pred_false)\ncol7.append(focus_false_pred_false)", "Accuracy of the network on the 1000 train images: 49 %\ntotal correct 493\ntotal train set images 1000\nfocus_true_pred_true 493 =============> FTPT : 49 %\nfocus_false_pred_true 0 =============> FFPT : 0 %\nfocus_true_pred_false 507 =============> FTPF : 50 %\nfocus_false_pred_false 0 =============> FFPF : 0 %\nargmax_more_than_half ==================> 0\nargmax_less_than_half ==================> 1000\n1000\n====================================================================================================\n" ], [ "correct = 0\ntotal = 0\ncount = 0\nflag = 1\nfocus_true_pred_true =0\nfocus_false_pred_true =0\nfocus_true_pred_false =0\nfocus_false_pred_false =0\n\nargmax_more_than_half = 0\nargmax_less_than_half =0\n\nwith torch.no_grad():\n for data in test_loader:\n inputs, labels , fore_idx = data\n inputs = inputs.double()\n inputs, labels , fore_idx = inputs.to(\"cuda\"),labels.to(\"cuda\"), fore_idx.to(\"cuda\")\n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n\n _, predicted = torch.max(outputs.data, 1)\n\n for j in range(labels.size(0)):\n focus = torch.argmax(alphas[j])\n if alphas[j][focus] >= 0.5 :\n argmax_more_than_half += 1\n else:\n argmax_less_than_half += 1\n\n if(focus == fore_idx[j] and predicted[j] == labels[j]):\n focus_true_pred_true += 1\n elif(focus != fore_idx[j] and predicted[j] == labels[j]):\n focus_false_pred_true += 1\n elif(focus == fore_idx[j] and predicted[j] != labels[j]):\n focus_true_pred_false += 1\n elif(focus != fore_idx[j] and predicted[j] != labels[j]):\n focus_false_pred_false 
+= 1\n\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\nprint('Accuracy of the network on the 1000 test images: %d %%' % (\n 100 * correct / total))\nprint(\"total correct\", correct)\nprint(\"total train set images\", total)\n\nprint(\"focus_true_pred_true %d =============> FTPT : %d %%\" % (focus_true_pred_true , (100 * focus_true_pred_true / total) ) )\nprint(\"focus_false_pred_true %d =============> FFPT : %d %%\" % (focus_false_pred_true, (100 * focus_false_pred_true / total) ) )\nprint(\"focus_true_pred_false %d =============> FTPF : %d %%\" %( focus_true_pred_false , ( 100 * focus_true_pred_false / total) ) )\nprint(\"focus_false_pred_false %d =============> FFPF : %d %%\" % (focus_false_pred_false, ( 100 * focus_false_pred_false / total) ) )\n\nprint(\"argmax_more_than_half ==================> \",argmax_more_than_half)\nprint(\"argmax_less_than_half ==================> \",argmax_less_than_half)\ncol8.append(argmax_more_than_half)\ncol9.append(argmax_less_than_half)\ncol10.append(focus_true_pred_true)\ncol11.append(focus_false_pred_true)\ncol12.append(focus_true_pred_false)\ncol13.append(focus_false_pred_false)", "Accuracy of the network on the 1000 test images: 51 %\ntotal correct 519\ntotal train set images 1000\nfocus_true_pred_true 519 =============> FTPT : 51 %\nfocus_false_pred_true 0 =============> FFPT : 0 %\nfocus_true_pred_false 481 =============> FTPF : 48 %\nfocus_false_pred_false 0 =============> FFPF : 0 %\nargmax_more_than_half ==================> 0\nargmax_less_than_half ==================> 1000\n" ], [ "nos_epochs = 1000\nfocus_true_pred_true =0\nfocus_false_pred_true =0\nfocus_true_pred_false =0\nfocus_false_pred_false =0\n\nargmax_more_than_half = 0\nargmax_less_than_half =0\n\n\nfor epoch in range(nos_epochs): # loop over the dataset multiple times\n\n focus_true_pred_true =0\n focus_false_pred_true =0\n focus_true_pred_false =0\n focus_false_pred_false =0\n \n argmax_more_than_half = 0\n argmax_less_than_half 
=0\n \n running_loss = 0.0\n epoch_loss = []\n cnt=0\n\n iteration = desired_num // batch\n \n #training data set\n \n for i, data in enumerate(train_loader):\n inputs , labels , fore_idx = data\n inputs, labels = inputs.to(\"cuda\"), labels.to(\"cuda\")\n inputs = inputs.double()\n # zero the parameter gradients\n \n optimizer_focus.zero_grad()\n optimizer_classify.zero_grad()\n \n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n\n _, predicted = torch.max(outputs.data, 1)\n# print(outputs)\n# print(outputs.shape,labels.shape , torch.argmax(outputs, dim=1))\n\n loss = criterion(outputs, labels) \n loss.backward()\n optimizer_focus.step()\n optimizer_classify.step()\n\n running_loss += loss.item()\n mini = 3\n if cnt % mini == mini-1: # print every 40 mini-batches\n print('[%d, %5d] loss: %.3f' %(epoch + 1, cnt + 1, running_loss / mini))\n epoch_loss.append(running_loss/mini)\n running_loss = 0.0\n cnt=cnt+1\n \n if epoch % 5 == 0:\n for j in range (batch):\n focus = torch.argmax(alphas[j])\n\n if(alphas[j][focus] >= 0.5):\n argmax_more_than_half +=1\n else:\n argmax_less_than_half +=1\n\n if(focus == fore_idx[j] and predicted[j] == labels[j]):\n focus_true_pred_true += 1\n\n elif(focus != fore_idx[j] and predicted[j] == labels[j]):\n focus_false_pred_true +=1\n\n elif(focus == fore_idx[j] and predicted[j] != labels[j]):\n focus_true_pred_false +=1\n\n elif(focus != fore_idx[j] and predicted[j] != labels[j]):\n focus_false_pred_false +=1\n\n if(np.mean(epoch_loss) <= 0.001):\n break;\n\n if epoch % 5 == 0:\n col1.append(epoch + 1)\n col2.append(argmax_more_than_half)\n col3.append(argmax_less_than_half)\n col4.append(focus_true_pred_true)\n col5.append(focus_false_pred_true)\n col6.append(focus_true_pred_false)\n col7.append(focus_false_pred_false)\n # print(\"=\"*20)\n # print(\"Train FTPT : \", col4)\n # print(\"Train FFPT : \", col5)\n #************************************************************************\n #testing data set \n # 
focus_net.eval()\n with torch.no_grad():\n focus_true_pred_true =0\n focus_false_pred_true =0\n focus_true_pred_false =0\n focus_false_pred_false =0\n\n argmax_more_than_half = 0\n argmax_less_than_half =0\n for data in test_loader:\n inputs, labels , fore_idx = data\n inputs = inputs.double()\n inputs, labels = inputs.to(\"cuda\"), labels.to(\"cuda\")\n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n _, predicted = torch.max(outputs.data, 1)\n\n for j in range (batch):\n focus = torch.argmax(alphas[j])\n\n if(alphas[j][focus] >= 0.5):\n argmax_more_than_half +=1\n else:\n argmax_less_than_half +=1\n\n if(focus == fore_idx[j] and predicted[j] == labels[j]):\n focus_true_pred_true += 1\n\n elif(focus != fore_idx[j] and predicted[j] == labels[j]):\n focus_false_pred_true +=1\n\n elif(focus == fore_idx[j] and predicted[j] != labels[j]):\n focus_true_pred_false +=1\n\n elif(focus != fore_idx[j] and predicted[j] != labels[j]):\n focus_false_pred_false +=1\n \n col8.append(argmax_more_than_half)\n col9.append(argmax_less_than_half)\n col10.append(focus_true_pred_true)\n col11.append(focus_false_pred_true)\n col12.append(focus_true_pred_false)\n col13.append(focus_false_pred_false)\n # print(\"Test FTPT : \", col10)\n # print(\"Test FFPT : \", col11)\n # print(\"=\"*20)\n \nprint('Finished Training')", "[1, 3] loss: 0.693\n[2, 3] loss: 0.692\n[3, 3] loss: 0.689\n[4, 3] loss: 0.684\n[5, 3] loss: 0.676\n[6, 3] loss: 0.654\n[7, 3] loss: 0.632\n[8, 3] loss: 0.627\n[9, 3] loss: 0.603\n[10, 3] loss: 0.586\n[11, 3] loss: 0.573\n[12, 3] loss: 0.555\n[13, 3] loss: 0.543\n[14, 3] loss: 0.530\n[15, 3] loss: 0.514\n[16, 3] loss: 0.500\n[17, 3] loss: 0.486\n[18, 3] loss: 0.480\n[19, 3] loss: 0.462\n[20, 3] loss: 0.460\n[21, 3] loss: 0.455\n[22, 3] loss: 0.433\n[23, 3] loss: 0.433\n[24, 3] loss: 0.427\n[25, 3] loss: 0.421\n[26, 3] loss: 0.404\n[27, 3] loss: 0.396\n[28, 3] loss: 0.393\n[29, 3] loss: 0.388\n[30, 3] loss: 0.374\n[31, 3] loss: 0.373\n[32, 3] loss: 
0.370\n[33, 3] loss: 0.357\n[34, 3] loss: 0.354\n[35, 3] loss: 0.349\n[36, 3] loss: 0.343\n[37, 3] loss: 0.338\n[38, 3] loss: 0.336\n[39, 3] loss: 0.333\n[40, 3] loss: 0.329\n[41, 3] loss: 0.321\n[42, 3] loss: 0.323\n[43, 3] loss: 0.310\n[44, 3] loss: 0.312\n[45, 3] loss: 0.306\n[46, 3] loss: 0.301\n[47, 3] loss: 0.295\n[48, 3] loss: 0.297\n[49, 3] loss: 0.294\n[50, 3] loss: 0.293\n[51, 3] loss: 0.290\n[52, 3] loss: 0.283\n[53, 3] loss: 0.280\n[54, 3] loss: 0.278\n[55, 3] loss: 0.271\n[56, 3] loss: 0.273\n[57, 3] loss: 0.268\n[58, 3] loss: 0.271\n[59, 3] loss: 0.264\n[60, 3] loss: 0.268\n[61, 3] loss: 0.258\n[62, 3] loss: 0.264\n[63, 3] loss: 0.255\n[64, 3] loss: 0.255\n[65, 3] loss: 0.252\n[66, 3] loss: 0.249\n[67, 3] loss: 0.244\n[68, 3] loss: 0.253\n[69, 3] loss: 0.255\n[70, 3] loss: 0.247\n[71, 3] loss: 0.244\n[72, 3] loss: 0.239\n[73, 3] loss: 0.243\n[74, 3] loss: 0.238\n[75, 3] loss: 0.235\n[76, 3] loss: 0.234\n[77, 3] loss: 0.229\n[78, 3] loss: 0.236\n[79, 3] loss: 0.233\n[80, 3] loss: 0.229\n[81, 3] loss: 0.227\n[82, 3] loss: 0.227\n[83, 3] loss: 0.222\n[84, 3] loss: 0.226\n[85, 3] loss: 0.225\n[86, 3] loss: 0.220\n[87, 3] loss: 0.220\n[88, 3] loss: 0.224\n[89, 3] loss: 0.218\n[90, 3] loss: 0.217\n[91, 3] loss: 0.223\n[92, 3] loss: 0.213\n[93, 3] loss: 0.212\n[94, 3] loss: 0.207\n[95, 3] loss: 0.213\n[96, 3] loss: 0.212\n[97, 3] loss: 0.204\n[98, 3] loss: 0.206\n[99, 3] loss: 0.211\n[100, 3] loss: 0.214\n[101, 3] loss: 0.201\n[102, 3] loss: 0.204\n[103, 3] loss: 0.211\n[104, 3] loss: 0.205\n[105, 3] loss: 0.202\n[106, 3] loss: 0.200\n[107, 3] loss: 0.203\n[108, 3] loss: 0.199\n[109, 3] loss: 0.204\n[110, 3] loss: 0.200\n[111, 3] loss: 0.197\n[112, 3] loss: 0.196\n[113, 3] loss: 0.191\n[114, 3] loss: 0.194\n[115, 3] loss: 0.198\n[116, 3] loss: 0.198\n[117, 3] loss: 0.188\n[118, 3] loss: 0.188\n[119, 3] loss: 0.189\n[120, 3] loss: 0.184\n[121, 3] loss: 0.184\n[122, 3] loss: 0.186\n[123, 3] loss: 0.187\n[124, 3] loss: 0.188\n[125, 3] loss: 0.182\n[126, 3] 
loss: 0.184\n[127, 3] loss: 0.186\n[128, 3] loss: 0.187\n[129, 3] loss: 0.185\n[130, 3] loss: 0.181\n[131, 3] loss: 0.179\n[132, 3] loss: 0.182\n[133, 3] loss: 0.186\n[134, 3] loss: 0.181\n[135, 3] loss: 0.180\n[136, 3] loss: 0.183\n[137, 3] loss: 0.181\n[138, 3] loss: 0.179\n[139, 3] loss: 0.175\n[140, 3] loss: 0.175\n[141, 3] loss: 0.174\n[142, 3] loss: 0.173\n[143, 3] loss: 0.168\n[144, 3] loss: 0.175\n[145, 3] loss: 0.168\n[146, 3] loss: 0.173\n[147, 3] loss: 0.174\n[148, 3] loss: 0.177\n[149, 3] loss: 0.170\n[150, 3] loss: 0.169\n[151, 3] loss: 0.169\n[152, 3] loss: 0.169\n[153, 3] loss: 0.167\n[154, 3] loss: 0.171\n[155, 3] loss: 0.173\n[156, 3] loss: 0.164\n[157, 3] loss: 0.170\n[158, 3] loss: 0.169\n[159, 3] loss: 0.165\n[160, 3] loss: 0.170\n[161, 3] loss: 0.158\n[162, 3] loss: 0.169\n[163, 3] loss: 0.174\n[164, 3] loss: 0.166\n[165, 3] loss: 0.163\n[166, 3] loss: 0.163\n[167, 3] loss: 0.164\n[168, 3] loss: 0.167\n[169, 3] loss: 0.160\n[170, 3] loss: 0.164\n[171, 3] loss: 0.163\n[172, 3] loss: 0.167\n[173, 3] loss: 0.159\n[174, 3] loss: 0.162\n[175, 3] loss: 0.153\n[176, 3] loss: 0.157\n[177, 3] loss: 0.162\n[178, 3] loss: 0.161\n[179, 3] loss: 0.162\n[180, 3] loss: 0.160\n[181, 3] loss: 0.161\n[182, 3] loss: 0.161\n[183, 3] loss: 0.161\n[184, 3] loss: 0.159\n[185, 3] loss: 0.167\n[186, 3] loss: 0.153\n[187, 3] loss: 0.159\n[188, 3] loss: 0.155\n[189, 3] loss: 0.158\n[190, 3] loss: 0.152\n[191, 3] loss: 0.161\n[192, 3] loss: 0.152\n[193, 3] loss: 0.163\n[194, 3] loss: 0.152\n[195, 3] loss: 0.158\n[196, 3] loss: 0.156\n[197, 3] loss: 0.165\n[198, 3] loss: 0.152\n[199, 3] loss: 0.153\n[200, 3] loss: 0.152\n[201, 3] loss: 0.151\n[202, 3] loss: 0.149\n[203, 3] loss: 0.152\n[204, 3] loss: 0.159\n[205, 3] loss: 0.144\n[206, 3] loss: 0.148\n[207, 3] loss: 0.149\n[208, 3] loss: 0.146\n[209, 3] loss: 0.161\n[210, 3] loss: 0.149\n[211, 3] loss: 0.151\n[212, 3] loss: 0.154\n[213, 3] loss: 0.146\n[214, 3] loss: 0.142\n[215, 3] loss: 0.149\n[216, 3] loss: 0.146\n[217, 
3] loss: 0.137\n[218, 3] loss: 0.147\n[219, 3] loss: 0.144\n[220, 3] loss: 0.152\n[221, 3] loss: 0.154\n[222, 3] loss: 0.155\n[223, 3] loss: 0.151\n[224, 3] loss: 0.141\n[225, 3] loss: 0.139\n[226, 3] loss: 0.146\n[227, 3] loss: 0.150\n[228, 3] loss: 0.143\n[229, 3] loss: 0.145\n[230, 3] loss: 0.149\n[231, 3] loss: 0.141\n[232, 3] loss: 0.139\n[233, 3] loss: 0.142\n[234, 3] loss: 0.137\n[235, 3] loss: 0.138\n[236, 3] loss: 0.140\n[237, 3] loss: 0.137\n[238, 3] loss: 0.147\n[239, 3] loss: 0.142\n[240, 3] loss: 0.148\n[241, 3] loss: 0.137\n[242, 3] loss: 0.136\n[243, 3] loss: 0.136\n[244, 3] loss: 0.141\n[245, 3] loss: 0.142\n[246, 3] loss: 0.139\n[247, 3] loss: 0.145\n[248, 3] loss: 0.139\n[249, 3] loss: 0.134\n[250, 3] loss: 0.133\n[251, 3] loss: 0.144\n[252, 3] loss: 0.143\n[253, 3] loss: 0.137\n[254, 3] loss: 0.140\n[255, 3] loss: 0.142\n[256, 3] loss: 0.148\n[257, 3] loss: 0.137\n[258, 3] loss: 0.137\n[259, 3] loss: 0.149\n[260, 3] loss: 0.141\n[261, 3] loss: 0.135\n[262, 3] loss: 0.133\n[263, 3] loss: 0.138\n[264, 3] loss: 0.135\n[265, 3] loss: 0.139\n[266, 3] loss: 0.132\n[267, 3] loss: 0.134\n[268, 3] loss: 0.131\n[269, 3] loss: 0.137\n[270, 3] loss: 0.133\n[271, 3] loss: 0.133\n[272, 3] loss: 0.132\n[273, 3] loss: 0.135\n[274, 3] loss: 0.129\n[275, 3] loss: 0.134\n[276, 3] loss: 0.134\n[277, 3] loss: 0.137\n[278, 3] loss: 0.136\n[279, 3] loss: 0.129\n[280, 3] loss: 0.132\n[281, 3] loss: 0.132\n[282, 3] loss: 0.126\n[283, 3] loss: 0.131\n[284, 3] loss: 0.119\n[285, 3] loss: 0.134\n[286, 3] loss: 0.133\n[287, 3] loss: 0.134\n[288, 3] loss: 0.134\n[289, 3] loss: 0.126\n[290, 3] loss: 0.130\n[291, 3] loss: 0.134\n[292, 3] loss: 0.131\n[293, 3] loss: 0.130\n[294, 3] loss: 0.132\n[295, 3] loss: 0.128\n[296, 3] loss: 0.134\n[297, 3] loss: 0.126\n[298, 3] loss: 0.122\n[299, 3] loss: 0.133\n[300, 3] loss: 0.124\n[301, 3] loss: 0.125\n[302, 3] loss: 0.124\n[303, 3] loss: 0.129\n[304, 3] loss: 0.129\n[305, 3] loss: 0.126\n[306, 3] loss: 0.126\n[307, 3] loss: 
0.126\n[308, 3] loss: 0.131\n[309, 3] loss: 0.128\n[310, 3] loss: 0.125\n[311, 3] loss: 0.121\n[312, 3] loss: 0.126\n[313, 3] loss: 0.124\n[314, 3] loss: 0.127\n[315, 3] loss: 0.127\n[316, 3] loss: 0.125\n[317, 3] loss: 0.127\n[318, 3] loss: 0.132\n[319, 3] loss: 0.128\n[320, 3] loss: 0.135\n[321, 3] loss: 0.126\n[322, 3] loss: 0.132\n[323, 3] loss: 0.126\n[324, 3] loss: 0.129\n[325, 3] loss: 0.128\n[326, 3] loss: 0.126\n[327, 3] loss: 0.126\n[328, 3] loss: 0.124\n[329, 3] loss: 0.127\n[330, 3] loss: 0.125\n[331, 3] loss: 0.122\n[332, 3] loss: 0.120\n[333, 3] loss: 0.126\n[334, 3] loss: 0.126\n[335, 3] loss: 0.117\n[336, 3] loss: 0.123\n[337, 3] loss: 0.126\n[338, 3] loss: 0.131\n[339, 3] loss: 0.120\n[340, 3] loss: 0.126\n[341, 3] loss: 0.119\n[342, 3] loss: 0.118\n[343, 3] loss: 0.129\n[344, 3] loss: 0.115\n[345, 3] loss: 0.126\n[346, 3] loss: 0.128\n[347, 3] loss: 0.129\n[348, 3] loss: 0.129\n[349, 3] loss: 0.123\n[350, 3] loss: 0.119\n[351, 3] loss: 0.118\n[352, 3] loss: 0.124\n[353, 3] loss: 0.128\n[354, 3] loss: 0.128\n[355, 3] loss: 0.121\n[356, 3] loss: 0.121\n[357, 3] loss: 0.128\n[358, 3] loss: 0.126\n[359, 3] loss: 0.127\n[360, 3] loss: 0.121\n[361, 3] loss: 0.125\n[362, 3] loss: 0.124\n[363, 3] loss: 0.121\n[364, 3] loss: 0.124\n[365, 3] loss: 0.119\n[366, 3] loss: 0.125\n[367, 3] loss: 0.133\n[368, 3] loss: 0.132\n[369, 3] loss: 0.133\n[370, 3] loss: 0.124\n[371, 3] loss: 0.120\n[372, 3] loss: 0.130\n[373, 3] loss: 0.127\n[374, 3] loss: 0.121\n[375, 3] loss: 0.120\n[376, 3] loss: 0.124\n[377, 3] loss: 0.123\n[378, 3] loss: 0.122\n[379, 3] loss: 0.119\n[380, 3] loss: 0.113\n[381, 3] loss: 0.116\n[382, 3] loss: 0.116\n[383, 3] loss: 0.116\n[384, 3] loss: 0.121\n[385, 3] loss: 0.120\n[386, 3] loss: 0.113\n[387, 3] loss: 0.113\n[388, 3] loss: 0.111\n[389, 3] loss: 0.124\n[390, 3] loss: 0.120\n[391, 3] loss: 0.117\n[392, 3] loss: 0.117\n[393, 3] loss: 0.115\n[394, 3] loss: 0.125\n[395, 3] loss: 0.124\n[396, 3] loss: 0.123\n[397, 3] loss: 0.127\n[398, 3] 
loss: 0.120\n[399, 3] loss: 0.118\n[400, 3] loss: 0.114\n[401, 3] loss: 0.115\n[402, 3] loss: 0.123\n[403, 3] loss: 0.116\n[404, 3] loss: 0.120\n[405, 3] loss: 0.120\n[406, 3] loss: 0.112\n[407, 3] loss: 0.115\n[408, 3] loss: 0.121\n[409, 3] loss: 0.111\n[410, 3] loss: 0.114\n[411, 3] loss: 0.113\n[412, 3] loss: 0.117\n[413, 3] loss: 0.118\n[414, 3] loss: 0.115\n[415, 3] loss: 0.117\n[416, 3] loss: 0.112\n[417, 3] loss: 0.111\n[418, 3] loss: 0.113\n[419, 3] loss: 0.108\n[420, 3] loss: 0.119\n[421, 3] loss: 0.109\n[422, 3] loss: 0.115\n[423, 3] loss: 0.119\n[424, 3] loss: 0.112\n[425, 3] loss: 0.113\n[426, 3] loss: 0.112\n[427, 3] loss: 0.114\n[428, 3] loss: 0.112\n[429, 3] loss: 0.112\n[430, 3] loss: 0.113\n[431, 3] loss: 0.109\n[432, 3] loss: 0.115\n[433, 3] loss: 0.128\n[434, 3] loss: 0.130\n[435, 3] loss: 0.119\n[436, 3] loss: 0.116\n[437, 3] loss: 0.105\n[438, 3] loss: 0.114\n[439, 3] loss: 0.120\n[440, 3] loss: 0.117\n[441, 3] loss: 0.130\n[442, 3] loss: 0.113\n[443, 3] loss: 0.106\n[444, 3] loss: 0.105\n[445, 3] loss: 0.113\n[446, 3] loss: 0.108\n[447, 3] loss: 0.115\n[448, 3] loss: 0.113\n[449, 3] loss: 0.109\n[450, 3] loss: 0.110\n[451, 3] loss: 0.113\n[452, 3] loss: 0.109\n[453, 3] loss: 0.111\n[454, 3] loss: 0.113\n[455, 3] loss: 0.116\n[456, 3] loss: 0.109\n[457, 3] loss: 0.109\n[458, 3] loss: 0.116\n[459, 3] loss: 0.117\n[460, 3] loss: 0.116\n[461, 3] loss: 0.110\n[462, 3] loss: 0.120\n[463, 3] loss: 0.105\n[464, 3] loss: 0.106\n[465, 3] loss: 0.103\n[466, 3] loss: 0.112\n[467, 3] loss: 0.109\n[468, 3] loss: 0.114\n[469, 3] loss: 0.109\n[470, 3] loss: 0.108\n[471, 3] loss: 0.124\n[472, 3] loss: 0.105\n[473, 3] loss: 0.111\n[474, 3] loss: 0.113\n[475, 3] loss: 0.109\n[476, 3] loss: 0.109\n[477, 3] loss: 0.110\n[478, 3] loss: 0.114\n[479, 3] loss: 0.104\n[480, 3] loss: 0.106\n[481, 3] loss: 0.112\n[482, 3] loss: 0.103\n[483, 3] loss: 0.104\n[484, 3] loss: 0.102\n[485, 3] loss: 0.103\n[486, 3] loss: 0.107\n[487, 3] loss: 0.101\n[488, 3] loss: 0.113\n[489, 
3] loss: 0.103\n[490, 3] loss: 0.107\n[491, 3] loss: 0.107\n[492, 3] loss: 0.113\n[493, 3] loss: 0.109\n[494, 3] loss: 0.104\n[495, 3] loss: 0.110\n[496, 3] loss: 0.106\n[497, 3] loss: 0.102\n[498, 3] loss: 0.107\n[499, 3] loss: 0.106\n[500, 3] loss: 0.111\n[501, 3] loss: 0.105\n[502, 3] loss: 0.108\n[503, 3] loss: 0.102\n[504, 3] loss: 0.110\n[505, 3] loss: 0.114\n[506, 3] loss: 0.113\n[507, 3] loss: 0.110\n[508, 3] loss: 0.110\n[509, 3] loss: 0.097\n[510, 3] loss: 0.101\n[511, 3] loss: 0.100\n[512, 3] loss: 0.106\n[513, 3] loss: 0.106\n[514, 3] loss: 0.109\n[515, 3] loss: 0.103\n[516, 3] loss: 0.106\n[517, 3] loss: 0.106\n[518, 3] loss: 0.099\n[519, 3] loss: 0.103\n[520, 3] loss: 0.107\n[521, 3] loss: 0.108\n[522, 3] loss: 0.106\n[523, 3] loss: 0.106\n[524, 3] loss: 0.103\n[525, 3] loss: 0.106\n[526, 3] loss: 0.103\n[527, 3] loss: 0.107\n[528, 3] loss: 0.105\n[529, 3] loss: 0.099\n[530, 3] loss: 0.102\n[531, 3] loss: 0.106\n[532, 3] loss: 0.105\n[533, 3] loss: 0.108\n[534, 3] loss: 0.102\n[535, 3] loss: 0.119\n[536, 3] loss: 0.114\n[537, 3] loss: 0.109\n[538, 3] loss: 0.113\n[539, 3] loss: 0.109\n[540, 3] loss: 0.106\n[541, 3] loss: 0.112\n[542, 3] loss: 0.104\n[543, 3] loss: 0.103\n[544, 3] loss: 0.103\n[545, 3] loss: 0.104\n[546, 3] loss: 0.102\n[547, 3] loss: 0.106\n[548, 3] loss: 0.101\n[549, 3] loss: 0.106\n[550, 3] loss: 0.102\n[551, 3] loss: 0.111\n[552, 3] loss: 0.107\n[553, 3] loss: 0.103\n[554, 3] loss: 0.098\n[555, 3] loss: 0.098\n[556, 3] loss: 0.111\n[557, 3] loss: 0.110\n[558, 3] loss: 0.122\n[559, 3] loss: 0.115\n[560, 3] loss: 0.110\n[561, 3] loss: 0.100\n[562, 3] loss: 0.100\n[563, 3] loss: 0.099\n[564, 3] loss: 0.101\n[565, 3] loss: 0.099\n[566, 3] loss: 0.105\n[567, 3] loss: 0.100\n[568, 3] loss: 0.101\n[569, 3] loss: 0.099\n[570, 3] loss: 0.100\n[571, 3] loss: 0.099\n[572, 3] loss: 0.098\n[573, 3] loss: 0.098\n[574, 3] loss: 0.106\n[575, 3] loss: 0.104\n[576, 3] loss: 0.104\n[577, 3] loss: 0.107\n[578, 3] loss: 0.102\n[579, 3] loss: 
0.100\n[580, 3] loss: 0.112\n[581, 3] loss: 0.105\n[582, 3] loss: 0.107\n[583, 3] loss: 0.114\n[584, 3] loss: 0.100\n[585, 3] loss: 0.100\n[586, 3] loss: 0.105\n[587, 3] loss: 0.108\n[588, 3] loss: 0.103\n[589, 3] loss: 0.106\n[590, 3] loss: 0.120\n[591, 3] loss: 0.101\n[592, 3] loss: 0.113\n[593, 3] loss: 0.099\n[594, 3] loss: 0.101\n[595, 3] loss: 0.101\n[596, 3] loss: 0.108\n[597, 3] loss: 0.099\n[598, 3] loss: 0.096\n[599, 3] loss: 0.101\n[600, 3] loss: 0.104\n[601, 3] loss: 0.100\n[602, 3] loss: 0.103\n[603, 3] loss: 0.099\n[604, 3] loss: 0.096\n[605, 3] loss: 0.100\n[606, 3] loss: 0.097\n[607, 3] loss: 0.096\n[608, 3] loss: 0.101\n[609, 3] loss: 0.099\n[610, 3] loss: 0.103\n[611, 3] loss: 0.099\n[612, 3] loss: 0.110\n[613, 3] loss: 0.104\n[614, 3] loss: 0.095\n[615, 3] loss: 0.101\n[616, 3] loss: 0.103\n[617, 3] loss: 0.095\n[618, 3] loss: 0.098\n[619, 3] loss: 0.101\n[620, 3] loss: 0.101\n[621, 3] loss: 0.098\n[622, 3] loss: 0.097\n[623, 3] loss: 0.089\n[624, 3] loss: 0.095\n[625, 3] loss: 0.104\n[626, 3] loss: 0.093\n[627, 3] loss: 0.100\n[628, 3] loss: 0.099\n[629, 3] loss: 0.105\n[630, 3] loss: 0.100\n[631, 3] loss: 0.102\n[632, 3] loss: 0.094\n[633, 3] loss: 0.107\n[634, 3] loss: 0.100\n[635, 3] loss: 0.098\n[636, 3] loss: 0.111\n[637, 3] loss: 0.106\n[638, 3] loss: 0.117\n[639, 3] loss: 0.098\n[640, 3] loss: 0.107\n[641, 3] loss: 0.119\n[642, 3] loss: 0.100\n[643, 3] loss: 0.107\n[644, 3] loss: 0.097\n[645, 3] loss: 0.100\n[646, 3] loss: 0.096\n[647, 3] loss: 0.101\n[648, 3] loss: 0.109\n[649, 3] loss: 0.098\n[650, 3] loss: 0.104\n[651, 3] loss: 0.100\n[652, 3] loss: 0.105\n[653, 3] loss: 0.106\n[654, 3] loss: 0.096\n[655, 3] loss: 0.114\n[656, 3] loss: 0.103\n[657, 3] loss: 0.113\n[658, 3] loss: 0.108\n[659, 3] loss: 0.108\n[660, 3] loss: 0.098\n[661, 3] loss: 0.101\n[662, 3] loss: 0.099\n[663, 3] loss: 0.101\n[664, 3] loss: 0.102\n[665, 3] loss: 0.098\n[666, 3] loss: 0.108\n[667, 3] loss: 0.112\n[668, 3] loss: 0.129\n[669, 3] loss: 0.100\n[670, 3] 
loss: 0.101\n[671, 3] loss: 0.091\n[672, 3] loss: 0.099\n[673, 3] loss: 0.096\n[674, 3] loss: 0.091\n[675, 3] loss: 0.096\n[676, 3] loss: 0.093\n[677, 3] loss: 0.098\n[678, 3] loss: 0.094\n[679, 3] loss: 0.097\n[680, 3] loss: 0.096\n[681, 3] loss: 0.094\n[682, 3] loss: 0.099\n[683, 3] loss: 0.090\n[684, 3] loss: 0.096\n[685, 3] loss: 0.098\n[686, 3] loss: 0.099\n[687, 3] loss: 0.102\n[688, 3] loss: 0.108\n[689, 3] loss: 0.109\n[690, 3] loss: 0.096\n[691, 3] loss: 0.122\n[692, 3] loss: 0.111\n[693, 3] loss: 0.108\n[694, 3] loss: 0.108\n[695, 3] loss: 0.095\n[696, 3] loss: 0.090\n[697, 3] loss: 0.094\n[698, 3] loss: 0.089\n[699, 3] loss: 0.093\n[700, 3] loss: 0.097\n[701, 3] loss: 0.100\n[702, 3] loss: 0.093\n[703, 3] loss: 0.104\n[704, 3] loss: 0.096\n[705, 3] loss: 0.100\n[706, 3] loss: 0.103\n[707, 3] loss: 0.097\n[708, 3] loss: 0.102\n[709, 3] loss: 0.099\n[710, 3] loss: 0.099\n[711, 3] loss: 0.096\n[712, 3] loss: 0.097\n[713, 3] loss: 0.096\n[714, 3] loss: 0.090\n[715, 3] loss: 0.098\n[716, 3] loss: 0.104\n[717, 3] loss: 0.102\n[718, 3] loss: 0.106\n[719, 3] loss: 0.106\n[720, 3] loss: 0.115\n[721, 3] loss: 0.092\n[722, 3] loss: 0.098\n[723, 3] loss: 0.095\n[724, 3] loss: 0.104\n[725, 3] loss: 0.102\n[726, 3] loss: 0.099\n[727, 3] loss: 0.095\n[728, 3] loss: 0.101\n[729, 3] loss: 0.090\n[730, 3] loss: 0.091\n[731, 3] loss: 0.092\n[732, 3] loss: 0.088\n[733, 3] loss: 0.093\n[734, 3] loss: 0.093\n[735, 3] loss: 0.093\n[736, 3] loss: 0.089\n[737, 3] loss: 0.093\n[738, 3] loss: 0.097\n[739, 3] loss: 0.093\n[740, 3] loss: 0.089\n[741, 3] loss: 0.097\n[742, 3] loss: 0.091\n[743, 3] loss: 0.085\n[744, 3] loss: 0.090\n[745, 3] loss: 0.092\n[746, 3] loss: 0.093\n[747, 3] loss: 0.089\n[748, 3] loss: 0.096\n[749, 3] loss: 0.090\n[750, 3] loss: 0.088\n[751, 3] loss: 0.091\n[752, 3] loss: 0.092\n[753, 3] loss: 0.093\n[754, 3] loss: 0.095\n[755, 3] loss: 0.096\n[756, 3] loss: 0.091\n[757, 3] loss: 0.093\n[758, 3] loss: 0.098\n[759, 3] loss: 0.094\n[760, 3] loss: 0.093\n[761, 
3] loss: 0.087\n[762, 3] loss: 0.088\n[763, 3] loss: 0.096\n[764, 3] loss: 0.096\n[765, 3] loss: 0.106\n[766, 3] loss: 0.109\n[767, 3] loss: 0.108\n[768, 3] loss: 0.099\n[769, 3] loss: 0.092\n[770, 3] loss: 0.094\n[771, 3] loss: 0.092\n[772, 3] loss: 0.093\n[773, 3] loss: 0.092\n[774, 3] loss: 0.095\n[775, 3] loss: 0.096\n[776, 3] loss: 0.091\n[777, 3] loss: 0.087\n[778, 3] loss: 0.093\n[779, 3] loss: 0.091\n[780, 3] loss: 0.090\n[781, 3] loss: 0.093\n[782, 3] loss: 0.094\n[783, 3] loss: 0.089\n[784, 3] loss: 0.091\n[785, 3] loss: 0.093\n[786, 3] loss: 0.090\n[787, 3] loss: 0.090\n[788, 3] loss: 0.085\n[789, 3] loss: 0.093\n[790, 3] loss: 0.093\n[791, 3] loss: 0.087\n[792, 3] loss: 0.097\n[793, 3] loss: 0.098\n[794, 3] loss: 0.102\n[795, 3] loss: 0.089\n[796, 3] loss: 0.089\n[797, 3] loss: 0.091\n[798, 3] loss: 0.097\n[799, 3] loss: 0.099\n[800, 3] loss: 0.093\n[801, 3] loss: 0.091\n[802, 3] loss: 0.102\n[803, 3] loss: 0.096\n[804, 3] loss: 0.096\n[805, 3] loss: 0.101\n[806, 3] loss: 0.090\n[807, 3] loss: 0.093\n[808, 3] loss: 0.090\n[809, 3] loss: 0.092\n[810, 3] loss: 0.088\n[811, 3] loss: 0.090\n[812, 3] loss: 0.087\n[813, 3] loss: 0.087\n[814, 3] loss: 0.091\n[815, 3] loss: 0.092\n[816, 3] loss: 0.087\n[817, 3] loss: 0.089\n[818, 3] loss: 0.088\n[819, 3] loss: 0.091\n[820, 3] loss: 0.092\n[821, 3] loss: 0.091\n[822, 3] loss: 0.089\n[823, 3] loss: 0.090\n[824, 3] loss: 0.089\n[825, 3] loss: 0.091\n[826, 3] loss: 0.087\n[827, 3] loss: 0.093\n[828, 3] loss: 0.088\n[829, 3] loss: 0.090\n[830, 3] loss: 0.084\n[831, 3] loss: 0.092\n[832, 3] loss: 0.086\n[833, 3] loss: 0.089\n[834, 3] loss: 0.094\n[835, 3] loss: 0.091\n[836, 3] loss: 0.096\n[837, 3] loss: 0.093\n[838, 3] loss: 0.088\n[839, 3] loss: 0.076\n[840, 3] loss: 0.089\n[841, 3] loss: 0.090\n[842, 3] loss: 0.090\n[843, 3] loss: 0.084\n[844, 3] loss: 0.092\n[845, 3] loss: 0.089\n[846, 3] loss: 0.090\n[847, 3] loss: 0.082\n[848, 3] loss: 0.093\n[849, 3] loss: 0.092\n[850, 3] loss: 0.094\n[851, 3] loss: 
0.099\n[852, 3] loss: 0.107\n[853, 3] loss: 0.099\n[854, 3] loss: 0.084\n[855, 3] loss: 0.088\n[856, 3] loss: 0.095\n[857, 3] loss: 0.085\n[858, 3] loss: 0.089\n[859, 3] loss: 0.087\n[860, 3] loss: 0.088\n[861, 3] loss: 0.089\n[862, 3] loss: 0.096\n[863, 3] loss: 0.094\n[864, 3] loss: 0.090\n[865, 3] loss: 0.092\n[866, 3] loss: 0.101\n[867, 3] loss: 0.088\n[868, 3] loss: 0.090\n[869, 3] loss: 0.091\n[870, 3] loss: 0.088\n[871, 3] loss: 0.085\n[872, 3] loss: 0.086\n[873, 3] loss: 0.088\n[874, 3] loss: 0.088\n[875, 3] loss: 0.087\n[876, 3] loss: 0.084\n[877, 3] loss: 0.091\n[878, 3] loss: 0.090\n[879, 3] loss: 0.090\n[880, 3] loss: 0.086\n[881, 3] loss: 0.084\n[882, 3] loss: 0.087\n[883, 3] loss: 0.094\n[884, 3] loss: 0.094\n[885, 3] loss: 0.099\n[886, 3] loss: 0.095\n[887, 3] loss: 0.099\n[888, 3] loss: 0.094\n[889, 3] loss: 0.088\n[890, 3] loss: 0.097\n[891, 3] loss: 0.092\n[892, 3] loss: 0.090\n[893, 3] loss: 0.086\n[894, 3] loss: 0.086\n[895, 3] loss: 0.083\n[896, 3] loss: 0.086\n[897, 3] loss: 0.080\n[898, 3] loss: 0.086\n[899, 3] loss: 0.088\n[900, 3] loss: 0.089\n[901, 3] loss: 0.095\n[902, 3] loss: 0.094\n[903, 3] loss: 0.096\n[904, 3] loss: 0.097\n[905, 3] loss: 0.097\n[906, 3] loss: 0.085\n[907, 3] loss: 0.089\n[908, 3] loss: 0.093\n[909, 3] loss: 0.098\n[910, 3] loss: 0.085\n[911, 3] loss: 0.085\n[912, 3] loss: 0.094\n[913, 3] loss: 0.088\n[914, 3] loss: 0.095\n[915, 3] loss: 0.089\n[916, 3] loss: 0.085\n[917, 3] loss: 0.091\n[918, 3] loss: 0.098\n[919, 3] loss: 0.090\n[920, 3] loss: 0.088\n[921, 3] loss: 0.086\n[922, 3] loss: 0.088\n[923, 3] loss: 0.088\n[924, 3] loss: 0.087\n[925, 3] loss: 0.088\n[926, 3] loss: 0.087\n[927, 3] loss: 0.092\n[928, 3] loss: 0.085\n[929, 3] loss: 0.090\n[930, 3] loss: 0.086\n[931, 3] loss: 0.093\n[932, 3] loss: 0.091\n[933, 3] loss: 0.082\n[934, 3] loss: 0.093\n[935, 3] loss: 0.086\n[936, 3] loss: 0.089\n[937, 3] loss: 0.081\n[938, 3] loss: 0.087\n[939, 3] loss: 0.082\n[940, 3] loss: 0.084\n[941, 3] loss: 0.084\n[942, 3] 
loss: 0.093\n[943, 3] loss: 0.094\n[944, 3] loss: 0.087\n[945, 3] loss: 0.085\n[946, 3] loss: 0.085\n[947, 3] loss: 0.090\n[948, 3] loss: 0.101\n[949, 3] loss: 0.104\n[950, 3] loss: 0.093\n[951, 3] loss: 0.089\n[952, 3] loss: 0.090\n[953, 3] loss: 0.091\n[954, 3] loss: 0.096\n[955, 3] loss: 0.088\n[956, 3] loss: 0.100\n[957, 3] loss: 0.093\n[958, 3] loss: 0.098\n[959, 3] loss: 0.091\n[960, 3] loss: 0.101\n[961, 3] loss: 0.101\n[962, 3] loss: 0.087\n[963, 3] loss: 0.083\n[964, 3] loss: 0.081\n[965, 3] loss: 0.093\n[966, 3] loss: 0.095\n[967, 3] loss: 0.086\n[968, 3] loss: 0.090\n[969, 3] loss: 0.081\n[970, 3] loss: 0.088\n[971, 3] loss: 0.095\n[972, 3] loss: 0.086\n[973, 3] loss: 0.087\n[974, 3] loss: 0.078\n[975, 3] loss: 0.082\n[976, 3] loss: 0.088\n[977, 3] loss: 0.086\n[978, 3] loss: 0.081\n[979, 3] loss: 0.085\n[980, 3] loss: 0.102\n[981, 3] loss: 0.104\n[982, 3] loss: 0.106\n[983, 3] loss: 0.105\n[984, 3] loss: 0.110\n[985, 3] loss: 0.093\n[986, 3] loss: 0.098\n[987, 3] loss: 0.093\n[988, 3] loss: 0.091\n[989, 3] loss: 0.089\n[990, 3] loss: 0.078\n[991, 3] loss: 0.082\n[992, 3] loss: 0.083\n[993, 3] loss: 0.090\n[994, 3] loss: 0.085\n[995, 3] loss: 0.083\n[996, 3] loss: 0.082\n[997, 3] loss: 0.092\n[998, 3] loss: 0.088\n[999, 3] loss: 0.083\n[1000, 3] loss: 0.082\nFinished Training\n" ], [ "df_train = pd.DataFrame()\ndf_test = pd.DataFrame()", "_____no_output_____" ], [ "columns = [\"epochs\", \"argmax > 0.5\" ,\"argmax < 0.5\", \"focus_true_pred_true\", \"focus_false_pred_true\", \"focus_true_pred_false\", \"focus_false_pred_false\" ]", "_____no_output_____" ], [ "df_train[columns[0]] = col1\ndf_train[columns[1]] = col2\ndf_train[columns[2]] = col3\ndf_train[columns[3]] = col4\ndf_train[columns[4]] = col5\ndf_train[columns[5]] = col6\ndf_train[columns[6]] = col7\n\ndf_test[columns[0]] = col1\ndf_test[columns[1]] = col8\ndf_test[columns[2]] = col9\ndf_test[columns[3]] = col10\ndf_test[columns[4]] = col11\ndf_test[columns[5]] = col12\ndf_test[columns[6]] = 
col13", "_____no_output_____" ], [ "df_train", "_____no_output_____" ], [ "# plt.figure(12,12)\nplt.plot(col1,np.array(col2)/10, label='argmax > 0.5')\nplt.plot(col1,np.array(col3)/10, label='argmax < 0.5')\n\nplt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\nplt.xlabel(\"epochs\")\nplt.ylabel(\"training data\")\nplt.title(\"On Training set\")\nplt.show()\n\nplt.plot(col1,np.array(col4)/10, label =\"focus_true_pred_true \")\nplt.plot(col1,np.array(col5)/10, label =\"focus_false_pred_true \")\nplt.plot(col1,np.array(col6)/10, label =\"focus_true_pred_false \")\nplt.plot(col1,np.array(col7)/10, label =\"focus_false_pred_false \")\nplt.title(\"On Training set\")\nplt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\nplt.xlabel(\"epochs\")\nplt.ylabel(\"training data\")\nplt.show()", "_____no_output_____" ], [ "df_test", "_____no_output_____" ], [ "# plt.figure(12,12)\nplt.plot(col1,np.array(col8)/10, label='argmax > 0.5')\nplt.plot(col1,np.array(col9)/10, label='argmax < 0.5')\n\nplt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\nplt.xlabel(\"epochs\")\nplt.ylabel(\"Testing data\")\nplt.title(\"On Testing set\")\nplt.show()\n\nplt.plot(col1,np.array(col10)/10, label =\"focus_true_pred_true \")\nplt.plot(col1,np.array(col11)/10, label =\"focus_false_pred_true \")\nplt.plot(col1,np.array(col12)/10, label =\"focus_true_pred_false \")\nplt.plot(col1,np.array(col13)/10, label =\"focus_false_pred_false \")\nplt.title(\"On Testing set\")\nplt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\nplt.xlabel(\"epochs\")\nplt.ylabel(\"Testing data\")\nplt.show()", "_____no_output_____" ], [ "correct = 0\ntotal = 0\ncount = 0\nflag = 1\nfocus_true_pred_true =0\nfocus_false_pred_true =0\nfocus_true_pred_false =0\nfocus_false_pred_false =0\n\nargmax_more_than_half = 0\nargmax_less_than_half =0\n\nwith torch.no_grad():\n for data in train_loader:\n inputs, labels , fore_idx = data\n inputs = inputs.double()\n inputs, labels , fore_idx = 
inputs.to(\"cuda\"),labels.to(\"cuda\"), fore_idx.to(\"cuda\")\n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n\n _, predicted = torch.max(outputs.data, 1)\n\n for j in range(labels.size(0)):\n focus = torch.argmax(alphas[j])\n if alphas[j][focus] >= 0.5 :\n argmax_more_than_half += 1\n else:\n argmax_less_than_half += 1\n\n if(focus == fore_idx[j] and predicted[j] == labels[j]):\n focus_true_pred_true += 1\n elif(focus != fore_idx[j] and predicted[j] == labels[j]):\n focus_false_pred_true += 1\n elif(focus == fore_idx[j] and predicted[j] != labels[j]):\n focus_true_pred_false += 1\n elif(focus != fore_idx[j] and predicted[j] != labels[j]):\n focus_false_pred_false += 1\n\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\nprint('Accuracy of the network on the 1000 train images: %d %%' % (\n 100 * correct / total))\nprint(\"total correct\", correct)\nprint(\"total train set images\", total)\n\nprint(\"focus_true_pred_true %d =============> FTPT : %d %%\" % (focus_true_pred_true , (100 * focus_true_pred_true / total) ) )\nprint(\"focus_false_pred_true %d =============> FFPT : %d %%\" % (focus_false_pred_true, (100 * focus_false_pred_true / total) ) )\nprint(\"focus_true_pred_false %d =============> FTPF : %d %%\" %( focus_true_pred_false , ( 100 * focus_true_pred_false / total) ) )\nprint(\"focus_false_pred_false %d =============> FFPF : %d %%\" % (focus_false_pred_false, ( 100 * focus_false_pred_false / total) ) )\n\nprint(\"argmax_more_than_half ==================> \",argmax_more_than_half)\nprint(\"argmax_less_than_half ==================> \",argmax_less_than_half)", "Accuracy of the network on the 1000 train images: 99 %\ntotal correct 997\ntotal train set images 1000\nfocus_true_pred_true 997 =============> FTPT : 99 %\nfocus_false_pred_true 0 =============> FFPT : 0 %\nfocus_true_pred_false 3 =============> FTPF : 0 %\nfocus_false_pred_false 0 =============> FFPF : 0 %\nargmax_more_than_half 
==================> 650\nargmax_less_than_half ==================> 350\n" ], [ "correct = 0\ntotal = 0\ncount = 0\nflag = 1\nfocus_true_pred_true =0\nfocus_false_pred_true =0\nfocus_true_pred_false =0\nfocus_false_pred_false =0\n\nargmax_more_than_half = 0\nargmax_less_than_half =0\n\nwith torch.no_grad():\n for data in test_loader:\n inputs, labels , fore_idx = data\n inputs = inputs.double()\n inputs, labels , fore_idx = inputs.to(\"cuda\"),labels.to(\"cuda\"), fore_idx.to(\"cuda\")\n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n\n _, predicted = torch.max(outputs.data, 1)\n\n for j in range(labels.size(0)):\n focus = torch.argmax(alphas[j])\n if alphas[j][focus] >= 0.5 :\n argmax_more_than_half += 1\n else:\n argmax_less_than_half += 1\n\n if(focus == fore_idx[j] and predicted[j] == labels[j]):\n focus_true_pred_true += 1\n elif(focus != fore_idx[j] and predicted[j] == labels[j]):\n focus_false_pred_true += 1\n elif(focus == fore_idx[j] and predicted[j] != labels[j]):\n focus_true_pred_false += 1\n elif(focus != fore_idx[j] and predicted[j] != labels[j]):\n focus_false_pred_false += 1\n\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\nprint('Accuracy of the network on the 1000 test images: %d %%' % (\n 100 * correct / total))\nprint(\"total correct\", correct)\nprint(\"total train set images\", total)\n\nprint(\"focus_true_pred_true %d =============> FTPT : %d %%\" % (focus_true_pred_true , (100 * focus_true_pred_true / total) ) )\nprint(\"focus_false_pred_true %d =============> FFPT : %d %%\" % (focus_false_pred_true, (100 * focus_false_pred_true / total) ) )\nprint(\"focus_true_pred_false %d =============> FTPF : %d %%\" %( focus_true_pred_false , ( 100 * focus_true_pred_false / total) ) )\nprint(\"focus_false_pred_false %d =============> FFPF : %d %%\" % (focus_false_pred_false, ( 100 * focus_false_pred_false / total) ) )\n\nprint(\"argmax_more_than_half ==================> 
\",argmax_more_than_half)\nprint(\"argmax_less_than_half ==================> \",argmax_less_than_half)", "Accuracy of the network on the 1000 test images: 99 %\ntotal correct 997\ntotal train set images 1000\nfocus_true_pred_true 997 =============> FTPT : 99 %\nfocus_false_pred_true 0 =============> FFPT : 0 %\nfocus_true_pred_false 3 =============> FTPF : 0 %\nfocus_false_pred_false 0 =============> FFPF : 0 %\nargmax_more_than_half ==================> 669\nargmax_less_than_half ==================> 331\n" ], [ "correct = 0\ntotal = 0\n\nwith torch.no_grad():\n for data in train_loader:\n inputs, labels , fore_idx = data\n inputs = inputs.double()\n inputs, labels = inputs.to(\"cuda\"), labels.to(\"cuda\")\n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n\n _, predicted = torch.max(outputs.data, 1)\n\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\nprint('Accuracy of the network on the 1000 train images: %d %%' % ( 100 * correct / total))\nprint(\"total correct\", correct)\nprint(\"total train set images\", total)", "Accuracy of the network on the 1000 train images: 99 %\ntotal correct 997\ntotal train set images 1000\n" ], [ "correct = 0\ntotal = 0\n\nwith torch.no_grad():\n for data in test_loader:\n inputs, labels , fore_idx = data\n inputs = inputs.double()\n inputs, labels = inputs.to(\"cuda\"), labels.to(\"cuda\")\n alphas, avg_images = focus_net(inputs)\n outputs = classify(avg_images)\n\n _, predicted = torch.max(outputs.data, 1)\n\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\nprint('Accuracy of the network on the 1000 test images: %d %%' % ( 100 * correct / total))\nprint(\"total correct\", correct)\nprint(\"total train set images\", total)", "Accuracy of the network on the 1000 test images: 99 %\ntotal correct 997\ntotal train set images 1000\n" ], [ "focus_net.fc1.weight, focus_net.fc1.bias", "_____no_output_____" ], [ "classify.fc1.weight, classify.fc1.bias", 
"_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa33cbc837a8da89703553ef4582259a5431953
21,009
ipynb
Jupyter Notebook
demo.ipynb
graphemecluster/first-order-model
99f18fc5c61209bb2dd451d34cc6f48334c4b6df
[ "MIT" ]
1
2022-02-20T02:32:22.000Z
2022-02-20T02:32:22.000Z
demo.ipynb
graphemecluster/first-order-model
99f18fc5c61209bb2dd451d34cc6f48334c4b6df
[ "MIT" ]
null
null
null
demo.ipynb
graphemecluster/first-order-model
99f18fc5c61209bb2dd451d34cc6f48334c4b6df
[ "MIT" ]
1
2022-02-20T02:31:14.000Z
2022-02-20T02:31:14.000Z
40.247126
239
0.531772
[ [ [ "<a href=\"https://colab.research.google.com/github/AliaksandrSiarohin/first-order-model/blob/master/demo.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# Demo for paper \"First Order Motion Model for Image Animation\"\nTo try the demo, press the 2 play buttons in order and scroll to the bottom. Note that it may take several minutes to load.", "_____no_output_____" ] ], [ [ "!pip install ffmpy &> /dev/null\n!git init -q .\n!git remote add origin https://github.com/AliaksandrSiarohin/first-order-model\n!git pull -q origin master\n!git clone -q https://github.com/graphemecluster/first-order-model-demo demo", "_____no_output_____" ], [ "import IPython.display\nimport PIL.Image\nimport cv2\nimport imageio\nimport io\nimport ipywidgets\nimport numpy\nimport os.path\nimport requests\nimport skimage.transform\nimport warnings\nfrom base64 import b64encode\nfrom demo import load_checkpoints, make_animation\nfrom ffmpy import FFmpeg\nfrom google.colab import files, output\nfrom IPython.display import HTML, Javascript\nfrom skimage import img_as_ubyte\nwarnings.filterwarnings(\"ignore\")\nos.makedirs(\"user\", exist_ok=True)\n\ndisplay(HTML(\"\"\"\n<style>\n.widget-box > * {\n\tflex-shrink: 0;\n}\n.widget-tab {\n\tmin-width: 0;\n\tflex: 1 1 auto;\n}\n.widget-tab .p-TabBar-tabLabel {\n\tfont-size: 15px;\n}\n.widget-upload {\n\tbackground-color: tan;\n}\n.widget-button {\n\tfont-size: 18px;\n\twidth: 160px;\n\theight: 34px;\n\tline-height: 34px;\n}\n.widget-dropdown {\n\twidth: 250px;\n}\n.widget-checkbox {\n width: 650px;\n}\n.widget-checkbox + .widget-checkbox {\n margin-top: -6px;\n}\n.input-widget .output_html {\n\ttext-align: center;\n\twidth: 266px;\n\theight: 266px;\n\tline-height: 266px;\n\tcolor: lightgray;\n\tfont-size: 72px;\n}\ndiv.stream {\n\tdisplay: none;\n}\n.title {\n\tfont-size: 20px;\n\tfont-weight: bold;\n\tmargin: 12px 0 6px 0;\n}\n.warning 
{\n\tdisplay: none;\n\tcolor: red;\n\tmargin-left: 10px;\n}\n.warn {\n\tdisplay: initial;\n}\n.resource {\n\tcursor: pointer;\n\tborder: 1px solid gray;\n\tmargin: 5px;\n\twidth: 160px;\n\theight: 160px;\n\tmin-width: 160px;\n\tmin-height: 160px;\n\tmax-width: 160px;\n\tmax-height: 160px;\n\t-webkit-box-sizing: initial;\n\tbox-sizing: initial;\n}\n.resource:hover {\n\tborder: 6px solid crimson;\n\tmargin: 0;\n}\n.selected {\n\tborder: 6px solid seagreen;\n\tmargin: 0;\n}\n.input-widget {\n\twidth: 266px;\n\theight: 266px;\n\tborder: 1px solid gray;\n}\n.input-button {\n\twidth: 268px;\n\tfont-size: 15px;\n\tmargin: 2px 0 0;\n}\n.output-widget {\n\twidth: 256px;\n\theight: 256px;\n\tborder: 1px solid gray;\n}\n.output-button {\n\twidth: 258px;\n\tfont-size: 15px;\n\tmargin: 2px 0 0;\n}\n.uploaded {\n\twidth: 256px;\n\theight: 256px;\n\tborder: 6px solid seagreen;\n\tmargin: 0;\n}\n.label-or {\n\talign-self: center;\n\tfont-size: 20px;\n\tmargin: 16px;\n}\n.loading {\n\talign-items: center;\n\twidth: fit-content;\n}\n.loader {\n\tmargin: 32px 0 16px 0;\n\twidth: 48px;\n\theight: 48px;\n\tmin-width: 48px;\n\tmin-height: 48px;\n\tmax-width: 48px;\n\tmax-height: 48px;\n\tborder: 4px solid whitesmoke;\n\tborder-top-color: gray;\n\tborder-radius: 50%;\n\tanimation: spin 1.8s linear infinite;\n}\n.loading-label {\n\tcolor: gray;\n}\n.comparison-widget {\n\twidth: 256px;\n\theight: 256px;\n\tborder: 1px solid gray;\n\tmargin-left: 2px;\n}\n.comparison-label {\n\tcolor: gray;\n\tfont-size: 14px;\n\ttext-align: center;\n\tposition: relative;\n\tbottom: 3px;\n}\n@keyframes spin {\n\tfrom { transform: rotate(0deg); }\n\tto { transform: rotate(360deg); }\n}\n</style>\n\"\"\"))\n\ndef thumbnail(file):\n\treturn imageio.get_reader(file, mode='I', format='FFMPEG').get_next_data()\n\ndef create_image(i, j):\n\timage_widget = ipywidgets.Image(\n\t\tvalue=open('demo/images/%d%d.png' % (i, j), 
'rb').read(),\n\t\tformat='png'\n\t)\n\timage_widget.add_class('resource')\n\timage_widget.add_class('resource-image')\n\timage_widget.add_class('resource-image%d%d' % (i, j))\n\treturn image_widget\n\ndef create_video(i):\n\tvideo_widget = ipywidgets.Image(\n\t\tvalue=cv2.imencode('.png', cv2.cvtColor(thumbnail('demo/videos/%d.mp4' % i), cv2.COLOR_RGB2BGR))[1].tostring(),\n\t\tformat='png'\n\t)\n\tvideo_widget.add_class('resource')\n\tvideo_widget.add_class('resource-video')\n\tvideo_widget.add_class('resource-video%d' % i)\n\treturn video_widget\n\ndef create_title(title):\n\ttitle_widget = ipywidgets.Label(title)\n\ttitle_widget.add_class('title')\n\treturn title_widget\n\ndef download_output(button):\n\tcomplete.layout.display = 'none'\n\tloading.layout.display = ''\n\tfiles.download('output.mp4')\n\tloading.layout.display = 'none'\n\tcomplete.layout.display = ''\n\ndef convert_output(button):\n\tcomplete.layout.display = 'none'\n\tloading.layout.display = ''\n\tFFmpeg(inputs={'output.mp4': None}, outputs={'scaled.mp4': '-vf \"scale=1080x1080:flags=lanczos,pad=1920:1080:420:0\" -y'}).run()\n\tfiles.download('scaled.mp4')\n\tloading.layout.display = 'none'\n\tcomplete.layout.display = ''\n\ndef back_to_main(button):\n\tcomplete.layout.display = 'none'\n\tmain.layout.display = ''\n\nlabel_or = ipywidgets.Label('or')\nlabel_or.add_class('label-or')\n\nimage_titles = ['Peoples', 'Cartoons', 'Dolls', 'Game of Thrones', 'Statues']\nimage_lengths = [8, 4, 8, 9, 4]\n\nimage_tab = ipywidgets.Tab()\nimage_tab.children = [ipywidgets.HBox([create_image(i, j) for j in range(length)]) for i, length in enumerate(image_lengths)]\nfor i, title in enumerate(image_titles):\n\timage_tab.set_title(i, title)\n\ninput_image_widget = ipywidgets.Output()\ninput_image_widget.add_class('input-widget')\nupload_input_image_button = ipywidgets.FileUpload(accept='image/*', button_style='primary')\nupload_input_image_button.add_class('input-button')\nimage_part = 
ipywidgets.HBox([\n\tipywidgets.VBox([input_image_widget, upload_input_image_button]),\n\tlabel_or,\n\timage_tab\n])\n\nvideo_tab = ipywidgets.Tab()\nvideo_tab.children = [ipywidgets.HBox([create_video(i) for i in range(5)])]\nvideo_tab.set_title(0, 'All Videos')\n\ninput_video_widget = ipywidgets.Output()\ninput_video_widget.add_class('input-widget')\nupload_input_video_button = ipywidgets.FileUpload(accept='video/*', button_style='primary')\nupload_input_video_button.add_class('input-button')\nvideo_part = ipywidgets.HBox([\n\tipywidgets.VBox([input_video_widget, upload_input_video_button]),\n\tlabel_or,\n\tvideo_tab\n])\n\nmodel = ipywidgets.Dropdown(\n\tdescription=\"Model:\",\n\toptions=[\n\t\t'vox',\n\t\t'vox-adv',\n\t\t'taichi',\n\t\t'taichi-adv',\n\t\t'nemo',\n\t\t'mgif',\n\t\t'fashion',\n\t\t'bair'\n\t]\n)\nwarning = ipywidgets.HTML('<b>Warning:</b> Upload your own images and videos (see README)')\nwarning.add_class('warning')\nmodel_part = ipywidgets.HBox([model, warning])\n\nrelative = ipywidgets.Checkbox(description=\"Relative keypoint displacement (Inherit object proporions from the video)\", value=True)\nadapt_movement_scale = ipywidgets.Checkbox(description=\"Adapt movement scale (Don’t touch unless you know want you are doing)\", value=True)\ngenerate_button = ipywidgets.Button(description=\"Generate\", button_style='primary')\nmain = ipywidgets.VBox([\n\tcreate_title('Choose Image'),\n\timage_part,\n\tcreate_title('Choose Video'),\n\tvideo_part,\n\tcreate_title('Settings'),\n\tmodel_part,\n\trelative,\n\tadapt_movement_scale,\n\tgenerate_button\n])\n\nloader = ipywidgets.Label()\nloader.add_class(\"loader\")\nloading_label = ipywidgets.Label(\"This may take several minutes to process…\")\nloading_label.add_class(\"loading-label\")\nloading = ipywidgets.VBox([loader, loading_label])\nloading.add_class('loading')\n\noutput_widget = ipywidgets.Output()\noutput_widget.add_class('output-widget')\ndownload = ipywidgets.Button(description='Download', 
button_style='primary')\ndownload.add_class('output-button')\ndownload.on_click(download_output)\nconvert = ipywidgets.Button(description='Convert to 1920×1080', button_style='primary')\nconvert.add_class('output-button')\nconvert.on_click(convert_output)\nback = ipywidgets.Button(description='Back', button_style='primary')\nback.add_class('output-button')\nback.on_click(back_to_main)\n\ncomparison_widget = ipywidgets.Output()\ncomparison_widget.add_class('comparison-widget')\ncomparison_label = ipywidgets.Label('Comparison')\ncomparison_label.add_class('comparison-label')\ncomplete = ipywidgets.HBox([\n\tipywidgets.VBox([output_widget, download, convert, back]),\n\tipywidgets.VBox([comparison_widget, comparison_label])\n])\n\ndisplay(ipywidgets.VBox([main, loading, complete]))\ndisplay(Javascript(\"\"\"\nvar images, videos;\nfunction deselectImages() {\n\timages.forEach(function(item) {\n\t\titem.classList.remove(\"selected\");\n\t});\n}\nfunction deselectVideos() {\n\tvideos.forEach(function(item) {\n\t\titem.classList.remove(\"selected\");\n\t});\n}\nfunction invokePython(func) {\n\tgoogle.colab.kernel.invokeFunction(\"notebook.\" + func, [].slice.call(arguments, 1), {});\n}\nsetTimeout(function() {\n\t(images = [].slice.call(document.getElementsByClassName(\"resource-image\"))).forEach(function(item) {\n\t\titem.addEventListener(\"click\", function() {\n\t\t\tdeselectImages();\n\t\t\titem.classList.add(\"selected\");\n\t\t\tinvokePython(\"select_image\", item.className.match(/resource-image(\\d\\d)/)[1]);\n\t\t});\n\t});\n\timages[0].classList.add(\"selected\");\n\t(videos = [].slice.call(document.getElementsByClassName(\"resource-video\"))).forEach(function(item) {\n\t\titem.addEventListener(\"click\", function() {\n\t\t\tdeselectVideos();\n\t\t\titem.classList.add(\"selected\");\n\t\t\tinvokePython(\"select_video\", item.className.match(/resource-video(\\d)/)[1]);\n\t\t});\n\t});\n\tvideos[0].classList.add(\"selected\");\n}, 1000);\n\"\"\"))\n\nselected_image 
= None\ndef select_image(filename):\n\tglobal selected_image\n\tselected_image = resize(PIL.Image.open('demo/images/%s.png' % filename).convert(\"RGB\"))\n\tinput_image_widget.clear_output(wait=True)\n\twith input_image_widget:\n\t\tdisplay(HTML('Image'))\n\tinput_image_widget.remove_class('uploaded')\noutput.register_callback(\"notebook.select_image\", select_image)\n\nselected_video = None\ndef select_video(filename):\n\tglobal selected_video\n\tselected_video = 'demo/videos/%s.mp4' % filename\n\tinput_video_widget.clear_output(wait=True)\n\twith input_video_widget:\n\t\tdisplay(HTML('Video'))\n\tinput_video_widget.remove_class('uploaded')\noutput.register_callback(\"notebook.select_video\", select_video)\n\ndef resize(image, size=(256, 256)):\n w, h = image.size\n d = min(w, h)\n r = ((w - d) // 2, (h - d) // 2, (w + d) // 2, (h + d) // 2)\n return image.resize(size, resample=PIL.Image.LANCZOS, box=r)\n\ndef upload_image(change):\n\tglobal selected_image\n\tfor name, file_info in upload_input_image_button.value.items():\n\t\tcontent = file_info['content']\n\tif content is not None:\n\t\tselected_image = resize(PIL.Image.open(io.BytesIO(content)).convert(\"RGB\"))\n\t\tinput_image_widget.clear_output(wait=True)\n\t\twith input_image_widget:\n\t\t\tdisplay(selected_image)\n\t\tinput_image_widget.add_class('uploaded')\n\t\tdisplay(Javascript('deselectImages()'))\nupload_input_image_button.observe(upload_image, names='value')\n\ndef upload_video(change):\n\tglobal selected_video\n\tfor name, file_info in upload_input_video_button.value.items():\n\t\tcontent = file_info['content']\n\tif content is not None:\n\t\tselected_video = 'user/' + name\n\t\tpreview = resize(PIL.Image.fromarray(thumbnail(content)).convert(\"RGB\"))\n\t\tinput_video_widget.clear_output(wait=True)\n\t\twith input_video_widget:\n\t\t\tdisplay(preview)\n\t\tinput_video_widget.add_class('uploaded')\n\t\tdisplay(Javascript('deselectVideos()'))\n\t\twith open(selected_video, 'wb') as 
video:\n\t\t\tvideo.write(content)\nupload_input_video_button.observe(upload_video, names='value')\n\ndef change_model(change):\n\tif model.value.startswith('vox'):\n\t\twarning.remove_class('warn')\n\telse:\n\t\twarning.add_class('warn')\nmodel.observe(change_model, names='value')\n\ndef generate(button):\n\tmain.layout.display = 'none'\n\tloading.layout.display = ''\n\tfilename = model.value + ('' if model.value == 'fashion' else '-cpk') + '.pth.tar'\n\tif not os.path.isfile(filename):\n\t\tdownload = requests.get(requests.get('https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key=https://yadi.sk/d/lEw8uRm140L_eQ&path=/' + filename).json().get('href'))\n\t\twith open(filename, 'wb') as checkpoint:\n\t\t\tcheckpoint.write(download.content)\n\treader = imageio.get_reader(selected_video, mode='I', format='FFMPEG')\n\tfps = reader.get_meta_data()['fps']\n\tdriving_video = []\n\tfor frame in reader:\n\t\tdriving_video.append(frame)\n\tgenerator, kp_detector = load_checkpoints(config_path='config/%s-256.yaml' % model.value, checkpoint_path=filename)\n\tpredictions = make_animation(\n\t\tskimage.transform.resize(numpy.asarray(selected_image), (256, 256)),\n\t\t[skimage.transform.resize(frame, (256, 256)) for frame in driving_video],\n\t\tgenerator,\n\t\tkp_detector,\n\t\trelative=relative.value,\n\t\tadapt_movement_scale=adapt_movement_scale.value\n\t)\n\tif selected_video.startswith('user/') or selected_video == 'demo/videos/0.mp4':\n\t\timageio.mimsave('temp.mp4', [img_as_ubyte(frame) for frame in predictions], fps=fps)\n\t\tFFmpeg(inputs={'temp.mp4': None, selected_video: None}, outputs={'output.mp4': '-c copy -y'}).run()\n\telse:\n\t\timageio.mimsave('output.mp4', [img_as_ubyte(frame) for frame in predictions], fps=fps)\n\tloading.layout.display = 'none'\n\tcomplete.layout.display = ''\n\twith output_widget:\n\t\tdisplay(HTML('<video id=\"left\" controls src=\"data:video/mp4;base64,%s\" />' % b64encode(open('output.mp4', 
'rb').read()).decode()))\n\twith comparison_widget:\n\t\tdisplay(HTML('<video id=\"right\" muted src=\"data:video/mp4;base64,%s\" />' % b64encode(open(selected_video, 'rb').read()).decode()))\n\tdisplay(Javascript(\"\"\"\n\t(function(left, right) {\n\t\tleft.addEventListener(\"play\", function() {\n\t\t\tright.play();\n\t\t});\n\t\tleft.addEventListener(\"pause\", function() {\n\t\t\tright.pause();\n\t\t});\n\t\tleft.addEventListener(\"seeking\", function() {\n\t\t\tright.currentTime = left.currentTime;\n\t\t});\n\t})(document.getElementById(\"left\"), document.getElementById(\"right\"));\n\t\"\"\"))\n\t\ngenerate_button.on_click(generate)\n\nloading.layout.display = 'none'\ncomplete.layout.display = 'none'\nselect_image('00')\nselect_video('0')", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code" ] ]
4aa33e4628e96e1080c45a23980de201a7122a88
1,814
ipynb
Jupyter Notebook
week07/Untitled.ipynb
AnaRita93/spiced_projects
64f0caec4008cc9ccb528e71ec16afba78728b8e
[ "MIT" ]
null
null
null
week07/Untitled.ipynb
AnaRita93/spiced_projects
64f0caec4008cc9ccb528e71ec16afba78728b8e
[ "MIT" ]
null
null
null
week07/Untitled.ipynb
AnaRita93/spiced_projects
64f0caec4008cc9ccb528e71ec16afba78728b8e
[ "MIT" ]
null
null
null
29.737705
526
0.54796
[ [ [ "#db.getCollection('farmers').insert([{}])", "_____no_output_____" ], [ "\n\nimport numpy as np\na = np.array([1,2,3,4,5])\n\n", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]
4aa35340d5aba27d57a43c1fc1c14ab0d2171553
14,170
ipynb
Jupyter Notebook
notebooks/Lab0py.ipynb
brianreyes3/PyLabs-Analisis-de-Datos
75aa129b2472177cabd814acf81cc03f3459d735
[ "MIT" ]
null
null
null
notebooks/Lab0py.ipynb
brianreyes3/PyLabs-Analisis-de-Datos
75aa129b2472177cabd814acf81cc03f3459d735
[ "MIT" ]
null
null
null
notebooks/Lab0py.ipynb
brianreyes3/PyLabs-Analisis-de-Datos
75aa129b2472177cabd814acf81cc03f3459d735
[ "MIT" ]
null
null
null
21.901082
70
0.421313
[ [ [ "# Primer Notebook\n## ESMA 3016\n## Edgar Acuna\n## Febrero 1, 2018", "_____no_output_____" ] ], [ [ "# Una operacion basica elemental\n(3+5*6)/float(12)", "_____no_output_____" ], [ "# Entrando el dato con el teclado\nage = input(\"How old are you? \")", "How old are you? 55\n" ], [ "print \"Your age is\", age", "Your age is 55\n" ], [ "print \"You have\", 65 - age, \"years until retirement\"", "You have 10 years until retirement\n" ], [ "name = \"Edgar Acuna Fernandez\"\nlength = len(name)", "_____no_output_____" ], [ "#imprimiendo en Mayuscula e imprimiendo la longitud\nbig_name = str.upper(name)\nprint big_name, \"tiene\", length, \"caracteres\"", "EDGAR ACUNA FERNANDEZ tiene 21 caracteres\n" ], [ "names = [\"Ana\", \"Rosa\", \"Julia\"]", "_____no_output_____" ], [ "names[0]", "_____no_output_____" ], [ "names[-2]", "_____no_output_____" ], [ "#uso de la funcion range\nrange(5)", "_____no_output_____" ], [ "range(5,10)", "_____no_output_____" ], [ "range(5,20,3)", "_____no_output_____" ], [ "#uso del condicional If\ngpa = 3.4\nif gpa > 2.0:\n print \"Su solicitud de admision es aceptada.\"", "Su solicitud de admision es aceptada.\n" ], [ "# Uso de if/else\ngpa = 1.4\nif gpa >= 2.5:\n print \"Bienvenido al Colegio de Mayaguez!\"\nelse:\n print \"Su solicitud de admision ha sido denegada.\"", "Su solicitud de admision ha sido denegada.\n" ], [ "#Ejemplo con operadores logicos\nAna=3\nRosa=25\nif (Ana <= 5 and Rosa >= 10):\n print \"Ana and Rosa\"", "Ana and Rosa\n" ], [ "if (Rosa == 500 or Ana != 5):\n print \"Otra vez Aana y Rosa\"", "Otra vez Aana y Rosa\n" ], [ "#Ejemplo de loop\nfor x in range(1, 4):\n print x, \"squared is\", x * x", "1 squared is 1\n2 squared is 4\n3 squared is 9\n" ], [ "# Otro ejemplo de loop\nnames = [\"Ana\", \"Rosa\", \"Julia\"]\nfor name in names:\n print name", "Ana\nRosa\nJulia\n" ], [ "# Ejemplo de break y continue\nfor value in [3, 1, 4, 1, 5, 9, 2]:\n print \"Checking\", value\n if value > 8:\n print \"Exiting for loop\"\n 
break\n elif value < 3:\n print \"Ignoring\"\n continue\n print \"The square is\", value**2", "Checking 3\nThe square is 9\nChecking 1\nIgnoring\nChecking 4\nThe square is 16\nChecking 1\nIgnoring\nChecking 5\nThe square is 25\nChecking 9\nExiting for loop\n" ], [ "#Ejemplo de while\nnumber = 1\nwhile number < 200:\n print number, \n number = number * 2", "1 2 4 8 16 32 64 128\n" ], [ "#Sunamndo una constante 10 a una lista\nvec1=[3,4,5]\n[x +10 for x in vec1]", "_____no_output_____" ], [ "#summando dos vectores componenete a componente\nvec2=[9,10,11]\nfor a,b in zip(vec1,vec2):\n print a+b", "12\n14\n16\n" ], [ "#usando el modulo matematico math\nimport math\nmath.pi", "_____no_output_____" ], [ "#usando el modulo matematico math con el alias m\nimport math as m\nm.pi", "_____no_output_____" ], [ "#importando solamente la funcion pi del modulo math\nfrom math import pi\npi", "_____no_output_____" ], [ "import pandas as pd\ndf=pd.read_csv(\"http://academic.uprm.edu/eacuna/Animals2.csv\")", "_____no_output_____" ], [ "print df", " Specie body brain\n0 Lesser short-tailed shrew 0.005 0.14\n1 Little brown bat 0.010 0.25\n2 Big brown bat 0.023 0.30\n3 Mouse 0.023 0.40\n4 Musk shrew 0.048 0.33\n5 Star-nosed mole 0.060 1.00\n6 E. American mole 0.075 1.20\n7 Ground squirrel 0.101 4.00\n8 Tree shrew 0.104 2.50\n9 Golden hamster 0.120 1.00\n10 Mole 0.122 3.00\n11 Galago 0.200 5.00\n12 Rat 0.280 1.90\n13 Chinchilla 0.425 6.40\n14 Owl monkey 0.480 15.50\n15 Desert hedgehog 0.550 2.40\n16 Rock hyrax-a 0.750 12.30\n17 European hedgehog 0.785 3.50\n18 Tenrec 0.900 2.60\n19 Artic ground squirrel 0.920 5.70\n20 African giant pouched rat 1.000 6.60\n21 Guinea pig 1.040 5.50\n22 Mountain beaver 1.350 8.10\n23 Slow loris 1.400 12.50\n24 Genet 1.410 17.50\n25 Phalanger 1.620 11.40\n26 N.A. opossum 1.700 6.30\n27 Tree hyrax 2.000 12.30\n28 Rabbit 2.500 12.10\n29 Echidna 3.000 25.00\n.. ... ... 
...\n35 Yellow-bellied marmot 4.050 17.00\n36 Verbet 4.190 58.00\n37 Red fox 4.235 50.40\n38 Raccoon 4.288 39.20\n39 Rhesus monkey 6.800 179.00\n40 Potar monkey 10.000 115.00\n41 Baboon 10.550 179.50\n42 Roe deer 14.830 98.20\n43 Goat 27.660 115.00\n44 Kangaroo 35.000 56.00\n45 Grey wolf 36.330 119.50\n46 Chimpanzee 52.160 440.00\n47 Sheep 55.500 175.00\n48 Giant armadillo 60.000 81.00\n49 Human 62.000 1320.00\n50 Grey seal 85.000 325.00\n51 Jaguar 100.000 157.00\n52 Brazilian tapir 160.000 169.00\n53 Donkey 187.100 419.00\n54 Pig 192.000 180.00\n55 Gorilla 207.000 406.00\n56 Okapi 250.000 490.00\n57 Cow 465.000 423.00\n58 Horse 521.000 655.00\n59 Giraffe 529.000 680.00\n60 Asian elephant 2547.000 4603.00\n61 African elephant 6654.000 5712.00\n62 Triceratops 9400.000 70.00\n63 Dipliodocus 11700.000 50.00\n64 Brachiosaurus 87000.000 154.50\n\n[65 rows x 3 columns]\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa35998d11251111d304697a19b52a626c433f8
58,069
ipynb
Jupyter Notebook
src/Hw3_bsm_price_change.ipynb
Jun-629/20MA573
addad663d2dede0422ae690e49b230815aea4c70
[ "MIT" ]
null
null
null
src/Hw3_bsm_price_change.ipynb
Jun-629/20MA573
addad663d2dede0422ae690e49b230815aea4c70
[ "MIT" ]
null
null
null
src/Hw3_bsm_price_change.ipynb
Jun-629/20MA573
addad663d2dede0422ae690e49b230815aea4c70
[ "MIT" ]
1
2020-02-05T21:42:08.000Z
2020-02-05T21:42:08.000Z
166.864943
16,652
0.848112
[ [ [ "<a href=\"https://colab.research.google.com/github/Jun-629/20MA573/blob/master/src/bsm_price_change.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "**Consider** an european option with\n- call type\n- strike = 110\n- maturity = T\nunderlying a Gbm stock with\n- initial: 100\n- interest rate: 4.75%\n- vol ratio: $\\sigma$\n\nWe denote this bsm price by $f(\\sigma, T)$.\n\n- Let $\\sigma = 20\\%$ fixed. plot $T \\mapsto f(0.2, T)$ when $T$ is ranging over $(0.5, 2)$.\n\n- Let $T = 1$ fixed. plot $\\sigma \\mapsto f(\\sigma, 1)$ when $\\sigma$ is ranging over $(.05, 0.5)$\n\n- Describe your observations. Do you think the same behavior is also true for put?\n\n- Could you prove your observations?", "_____no_output_____" ] ], [ [ "import scipy.stats as stats\n\nclass VanillaOption:\n def __init__(\n self,\n otype = 1, # 1: 'call' -1: 'put'\n strike = 110.,\n maturity = 1.,\n market_price = 10.):\n self.otype = otype\n self.strike = strike\n self.maturity = maturity\n self.market_price = market_price\n\nclass Gbm:\n def __init__(self, init_state = 100., drift_ratio = .0475, vol_ratio = .2):\n self.init_state = init_state\n self.drift_ratio = drift_ratio\n self.vol_ratio = vol_ratio\n\ndef bsm_price(self, vanilla_option):\n s0 = self.init_state\n sigma = self.vol_ratio\n r = self.drift_ratio\n \n otype = vanilla_option.otype\n k = vanilla_option.strike\n maturity = vanilla_option.maturity\n \n d1 = (np.log(s0 / k) + (r + 0.5 * sigma ** 2) * maturity) / (sigma * np.sqrt(maturity))\n d2 = d1 - sigma * np.sqrt(maturity)\n \n return (otype * s0 * stats.norm.cdf(otype * d1) - otype * np.exp(-r * maturity) * k * stats.norm.cdf(otype * d2))\n\nGbm.bsm_price = bsm_price", "_____no_output_____" ] ], [ [ "**Soln:**\n- Let $\\sigma = 20\\%$ fixed. 
plot $T \\mapsto f(0.2, T)$ when $T$ is ranging over $(0.5, 2)$.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport matplotlib.pyplot as plt\n\nT = np.arange(0.5,2.1,0.1)\ngbm1 = Gbm()\noption1 = VanillaOption(otype=1, strike=110, maturity=T)\nBS_price1 = gbm1.bsm_price(option1)\n\nplt.plot(T, BS_price1, label = 'BSM price of f(0.2, T=(0.5,2.0))')\nplt.legend()", "_____no_output_____" ] ], [ [ "- Let $T = 1$ fixed. plot $\\sigma \\mapsto f(\\sigma, 1)$ when $\\sigma$ is ranging over $(.05, 0.5)$", "_____no_output_____" ] ], [ [ "sigma = np.arange(0.05, 0.53, 0.03)\n\ngbm2 = Gbm(100., .0475, sigma)\noption2 = VanillaOption()\nBS_price2 = gbm2.bsm_price(option2)\n\nplt.plot(sigma, BS_price2, label = 'BSM price of f(σ=(0.05,0.5),1)')\nplt.legend()", "_____no_output_____" ] ], [ [ "__Observation:__\n\n**1.** For the fixed volatility $\\sigma$, with the increasing of maturity time, the bsm price of call option increases.\n\n**2.** For the fixed time, with the increasing of volatility $\\sigma$, the bsm price of call option increases.\n\nThe put option shares the same conclusion in **observation 2.** as the call option.", "_____no_output_____" ], [ "__Pf:__\n\nFirstly, we will show the counter example of **observation 1.** for the put option.", "_____no_output_____" ] ], [ [ "T = np.arange(0.2,4.,0.1)\ngbm1 = Gbm()\noption3 = VanillaOption(otype=-1, strike=110, maturity=T)\nBS_price1 = gbm1.bsm_price(option3)\n\nplt.plot(T, BS_price1, label = 'BSM price of f(0.2, T=(0.5,2.0))')\nplt.legend()", "_____no_output_____" ] ], [ [ "The plot shows that when maturity time is big enough, the bsm price of put option will decrease as the maturity time increases.\n\nNow we will prove the **observation 1.** and the same conclusion for the put option.\n\nDue to the Put-Call parity $$C_t - P_t = S(t) - Ke^{-r(T-t)} ,$$\nwhen $C_t$ increases as the volatility increases, $P_t$ has to increase since $S(t) - Ke^{-r(T-t)}$ is a constant, which means that we just need to prove the 
**observation 1.**.\n\n$$C_t = \\mathbb E [e^{-r(T-t)} (S(T-t) - K)^+] = S_t \\Phi(d_1) - K e^{-r(T-t)} \\Phi(d_2),$$\nwhere $d_i$ are given as\n$$d_1 = \\frac{(r + \\frac 1 2 \\sigma^2) T - \\ln \\frac{K}{S_0}}{\\sigma \\sqrt T},$$\nand\n$$d_2 = \\frac{(r - \\frac 1 2 \\sigma^2) T - \\ln \\frac{K}{S_0}}{\\sigma \\sqrt T} = d_1 - \\sigma \\sqrt T.$$\nWithout loss of generality, assuming that $t = 0$, then we will have\n$$C_0 = \\mathbb E [e^{-rT} (S(T) - K)^+] = S_0 \\Phi(d_1) - K e^{-rT} \\Phi(d_2),$$\nthus\n\\begin{equation}\n\\begin{split}\n\\frac{\\partial C_0}{\\partial \\sigma} &= S_0 \\frac{\\partial \\Phi(d_1)}{\\partial \\sigma} - K e^{-rT} \\frac{\\partial \\Phi(d_2)}{\\partial \\sigma} \\\\\n&= S_0 \\Phi'(d_1) \\frac{\\partial d_1}{\\partial \\sigma} - K e^{-rT} \\Phi'(d_2) (\\frac{\\partial d_1}{\\partial \\sigma} - \\sqrt T) \\\\\n\\frac{\\partial d_1}{\\partial \\sigma} &= \\frac{1}{\\sqrt T}\\frac{\\sigma T \\cdot \\sigma - [(r + \\frac 1 2 \\sigma^2) T - \\ln \\frac{K}{S_0}]}{\\sigma^2} \\\\\n&= \\sqrt T - \\frac{d_1}{\\sigma} \\\\\n\\Phi'(d_i) &= \\frac{1}{\\sqrt {2\\pi}}e^{-\\frac{d_i^2}{2}}, i = 1,2\n\\end{split}\n\\end{equation}\nthen by calculation, we will have \n\\begin{equation}\n\\begin{split}\n\\frac{\\partial C_0}{\\partial \\sigma} &= \\frac{1}{\\sqrt {2\\pi}} [S_0 \\cdot e^{-\\frac{d_1^2}{2}} \\cdot (\\sqrt T - \\frac{d_1}{\\sigma}) + K e^{-rT} \\cdot e^{-\\frac{d_1^2 - 2\\sigma \\sqrt T d_1 + \\sigma^2 T}{2}} \\cdot \\frac{d_1}{\\sigma}] \\\\\n&= \\frac{1}{\\sqrt {2\\pi}} e^{-\\frac{d_1^2}{2}} [S_0 \\cdot (\\sqrt T - \\frac{d_1}{\\sigma}) + K e^{-rT} \\cdot \\frac{d_1}{\\sigma} \\cdot e^{\\sigma \\sqrt T d_1 - \\frac{\\sigma^2 T}{2}}] \\\\\n&= \\frac{1}{\\sqrt {2\\pi}} e^{-\\frac{d_1^2}{2}} [S_0 \\cdot (\\sqrt T - \\frac{d_1}{\\sigma}) + K e^{-rT} \\cdot \\frac{d_1}{\\sigma} \\cdot e^{rT - ln{\\frac{K}{S_0}}}]\\\\\n&= \\frac{1}{\\sqrt {2\\pi}} e^{-\\frac{d_1^2}{2}} [S_0 \\cdot (\\sqrt T - \\frac{d_1}{\\sigma}) + K \\cdot 
\\frac{d_1}{\\sigma} \\cdot e^{ln{\\frac{S_0}{K}}}] \\\\\n&= \\frac{1}{\\sqrt {2\\pi}} e^{-\\frac{d_1^2}{2}} \\cdot S_0 \\cdot \\sqrt T > 0\n\\end{split}\n\\end{equation}\nSince every term of right hand side of the equation is positive, which means the BSM price increases with the increasing of volatility $\\sigma$.\n\n__Q.E.D.__\n\n\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ] ]
4aa35e724d711886807a0592087ac8f8e6219060
22,484
ipynb
Jupyter Notebook
tutorials/02_uploading_program.ipynb
eggerdj/qiskit-runtime
906473bc773b91fe99432aad4c046d06525f74c4
[ "Apache-2.0" ]
null
null
null
tutorials/02_uploading_program.ipynb
eggerdj/qiskit-runtime
906473bc773b91fe99432aad4c046d06525f74c4
[ "Apache-2.0" ]
null
null
null
tutorials/02_uploading_program.ipynb
eggerdj/qiskit-runtime
906473bc773b91fe99432aad4c046d06525f74c4
[ "Apache-2.0" ]
null
null
null
36.980263
1,222
0.608255
[ [ [ "# Uploading a Qiskit runtime program", "_____no_output_____" ], [ "<div class=\"alert alert-block alert-info\">\n<b>Note:</b> Qiskit Runtime allows authorized users to upload runtime programs. Access to the Qiskit Runtime service may not mean you have access to upload a runtime program.\n</div>", "_____no_output_____" ], [ "Here we provide an overview on how to construct and upload a runtime program. A runtime program is a piece of Python code that lives in the cloud and can be invoked by passing in just its parameters. Runtime programs are private by default, which means only you can see and access your programs. Some authorized users can also mark their programs as public, making them visible and accessible by everyone.", "_____no_output_____" ], [ "## Constructing a runtime program", "_____no_output_____" ], [ "Below is a template of a runtime program. You can find the template file in the \n[`qiskit-ibmq-provider`](https://github.com/Qiskit/qiskit-ibmq-provider/blob/master/qiskit/providers/ibmq/runtime/program/program_template.py) repository.", "_____no_output_____" ] ], [ [ "import sys\nimport json\n\nfrom qiskit.providers.ibmq.runtime import UserMessenger, ProgramBackend\n\n\ndef program(backend: ProgramBackend, user_messenger: UserMessenger, **kwargs):\n \"\"\"Function that does classical-quantum calculation.\"\"\"\n # UserMessenger can be used to publish interim results.\n user_messenger.publish(\"This is an interim result.\")\n return \"final result\"\n\n\ndef main(backend: ProgramBackend, user_messenger: UserMessenger, **kwargs):\n \"\"\"This is the main entry point of a runtime program.\n\n The name of this method must not change. 
It also must have ``backend``\n and ``user_messenger`` as the first two positional arguments.\n\n Args:\n backend: Backend for the circuits to run on.\n user_messenger: Used to communicate with the program user.\n kwargs: User inputs.\n \"\"\"\n # Massage the input if necessary.\n result = program(backend, user_messenger, **kwargs)\n # Final result can be directly returned\n return result\n", "_____no_output_____" ] ], [ [ "Each runtime program must have a `main()` function, which serves as the entry point to the program. This function must have `backend` and `user_messenger` as the first two positional arguments:\n\n- `backend` is an instance of [`ProgramBackend`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.ProgramBackend.html#qiskit.providers.ibmq.runtime.ProgramBackend) and has a [`run()`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.ProgramBackend.run.html#qiskit.providers.ibmq.runtime.ProgramBackend.run) method that can be used to submit circuits.\n- `user_messenger` is an instance of [`UserMessenger`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.UserMessenger.html#qiskit.providers.ibmq.runtime.UserMessenger) and has a [`publish()`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.UserMessenger.publish.html#qiskit.providers.ibmq.runtime.UserMessenger.publish) method that can be used to send interim and final results to the program user. This method takes a parameter `final` that indicates whether it's a final result. However, it is recommended to return the final result directly from the `main()` function. Currently only final results are stored after a program execution finishes.", "_____no_output_____" ], [ "There are several runtime programs in the `qiskit_runtime` directory in this repository. `qiskit_runtime/sample_program/sample_program.py` is one of them. 
It is a sample runtime program that submits random circuits for user-specified iterations:", "_____no_output_____" ] ], [ [ "\"\"\"A sample runtime program that submits random circuits for user-specified iterations.\"\"\"\n\nimport random\n\nfrom qiskit import transpile\nfrom qiskit.circuit.random import random_circuit\n\n\ndef prepare_circuits(backend):\n \"\"\"Generate a random circuit.\n\n Args:\n backend: Backend used for transpilation.\n\n Returns:\n Generated circuit.\n \"\"\"\n circuit = random_circuit(num_qubits=5, depth=4, measure=True,\n seed=random.randint(0, 1000))\n return transpile(circuit, backend)\n\n\ndef main(backend, user_messenger, **kwargs):\n \"\"\"Main entry point of the program.\n\n Args:\n backend: Backend to submit the circuits to.\n user_messenger: Used to communicate with the program consumer.\n kwargs: User inputs.\n \"\"\"\n iterations = kwargs.pop('iterations', 5)\n for it in range(iterations):\n qc = prepare_circuits(backend)\n result = backend.run(qc).result()\n user_messenger.publish({\"iteration\": it, \"counts\": result.get_counts()})\n\n return \"All done!\"\n", "_____no_output_____" ] ], [ [ "## Data serialization", "_____no_output_____" ], [ "Runtime programs live in the cloud, and JSON is the standard way of passing data to and from cloud services. Therefore, when a user invokes a runtime program, the input parameters must first be serialized into the JSON format and then deserialized once received by the server. 
By default, this serialization and deserialization is done automatically using the [`RuntimeEncoder`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.RuntimeEncoder.html#qiskit.providers.ibmq.runtime.RuntimeEncoder) and [`RuntimeDecoder`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.RuntimeDecoder.html#qiskit.providers.ibmq.runtime.RuntimeDecoder) classes.\n", "_____no_output_____" ], [ "### Custom classes", "_____no_output_____" ], [ "`RuntimeEncoder` and `RuntimeDecoder` only support types commonly used in Qiskit, such as complex numbers and numpy arrays. If your program uses custom Python classes for input or output, these two methods only have partial support for that. \n\nYour custom class should have the following methods:\n\n- a `to_json()` method that returns a JSON string representation of the object\n- a `from_json()` class method that accepts a JSON string and returns the corresponding object. \n\nWhen `RuntimeEncoder` serializes a Python object, it checks whether the object has a `to_json()` method. If so, it calls the method to serialize the object. `RuntimeDecoder`, however, does _not_ invoke `from_json()` to convert the data back because it doesn't know how to import your custom class. Therefore the deserialization needs to be done explicitly. ", "_____no_output_____" ], [ "Here is an example of serializing and deserializing a custom class. 
First we define the class `MyCustomClass`:", "_____no_output_____" ] ], [ [ "import json\n\nclass MyCustomClass:\n \n def __init__(self, foo, bar):\n self._foo = foo\n self._bar = bar\n \n def to_json(self):\n \"\"\"Convert this instance to a JSON string.\"\"\"\n return json.dumps({\"foo\": self._foo, \"bar\": self._bar})\n \n @classmethod\n def from_json(cls, json_str):\n \"\"\"Return a MyCustomClass instance based on the input JSON string.\"\"\"\n return cls(**json.loads(json_str))", "_____no_output_____" ] ], [ [ "Note that it has the `to_json()` method that converts a `MyCustomClass` instance to a JSON string, and a `from_json()` class method that converts a JSON string back to a `MyCustomClass` instance.", "_____no_output_____" ], [ "Here is how one would use `MyCustomClass` as an **input** to your program:", "_____no_output_____" ], [ "```\nprogram_inputs = {\n 'my_obj': MyCustomClass(\"my foo\", \"my bar\")\n}\n\noptions = {\"backend_name\": \"ibmq_qasm_simulator\"}\njob = provider.runtime.run(program_id=\"some-program\",\n options=options,\n inputs=program_inputs\n )\n```", "_____no_output_____" ], [ "Since `MyCustomClass` has a `to_json()` method, the method is automatically called to convert the instance to a JSON string when `provider.runtime.run()` is invoked. 
\n\nYour program can then use the `from_json()` method to restore the JSON string back to a `MyCustomClass` instance:", "_____no_output_____" ] ], [ [ "def main(backend, user_messenger, **kwargs):\n \"\"\"Main entry point of the program.\"\"\"\n my_obj_str = kwargs.pop('my_obj')\n my_obj = MyCustomClass.from_json(my_obj_str)", "_____no_output_____" ] ], [ [ "Similarly, if you pass a `MyCustomClass` instance as an **output** of your program, it is automatically converted to a JSON string (via the `to_json()` method):", "_____no_output_____" ] ], [ [ "def main(backend, user_messenger, **kwargs):\n \"\"\"Main entry point of the program.\"\"\"\n return MyCustomClass(\"this foo\", \"that bar\")", "_____no_output_____" ] ], [ [ "Now when the user of this program calls `job.result()`, they will receive a JSON string rather than a `MyCustomClass` instance. The user can convert the string back to `MyCustomClass` themselves:", "_____no_output_____" ], [ "```\noutput_str = job.result()\noutput = MyCustomClass.from_json(output_str)\n```", "_____no_output_____" ], [ "Alternatively, you can provide a decoder for the users. Your decoder class should inherit [`ResultDecoder`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.ResultDecoder.html#qiskit.providers.ibmq.runtime.ResultDecoder) and overwrites the `decode()` method:", "_____no_output_____" ] ], [ [ "from qiskit.providers.ibmq.runtime import ResultDecoder\n\nclass MyResultDecoder(ResultDecoder):\n\n @classmethod\n def decode(cls, data):\n data = super().decoded(data) # Perform any preprocessing.\n return MyCustomClass.from_json(data)", "_____no_output_____" ] ], [ [ "Your user can then use this `MyResultDecoder` to decode the result of your program:\n\n```\noutput = job.result(decoder=MyResultDecoder)\n```", "_____no_output_____" ], [ "## Testing your runtime program", "_____no_output_____" ], [ "You can test your runtime program using a local simulator or a real backend before uploading it. 
Simply import and invoke the `main()` function of your program and pass the following parameters:\n\n- the `backend` instance you want to use\n- a new `UserMessenger` instance.\n- program input parameters that are serialized and then deserialized using the correct encoder and decoder. While this may seem redundant, it is to ensure input parameters can be passed to your program properly once it's uploaded to the cloud.\n", "_____no_output_____" ], [ "The following example tests the `sample-program` program we saw earlier. It uses the `qasm_simulator` from Qiskit Aer as the test backend. It serializes and deserializes input data using `RuntimeEncoder` and `RuntimeDecoder`, which are the default en/decoders used by runtime.", "_____no_output_____" ] ], [ [ "import sys\nsys.path.insert(0, '..') # Add qiskit_runtime directory to the path\n\nfrom qiskit_runtime.sample_program import sample_program\nfrom qiskit import Aer\nfrom qiskit.providers.ibmq.runtime.utils import RuntimeEncoder, RuntimeDecoder\nfrom qiskit.providers.ibmq.runtime import UserMessenger\n\ninputs = {\"iterations\": 3}\n\nbackend = Aer.get_backend('qasm_simulator')\nuser_messenger = UserMessenger()\nserialized_inputs = json.dumps(inputs, cls=RuntimeEncoder)\ndeserialized_inputs = json.loads(serialized_inputs, cls=RuntimeDecoder)\n\nsample_program.main(backend, user_messenger, **deserialized_inputs)", "{\"iteration\": 0, \"counts\": {\"01000\": 4, \"00000\": 12, \"00011\": 872, \"01011\": 136}}\n{\"iteration\": 1, \"counts\": {\"01000\": 6, \"00000\": 19, \"00011\": 871, \"01011\": 128}}\n{\"iteration\": 2, \"counts\": {\"00001\": 1024}}\n" ] ], [ [ "## Defining program metadata", "_____no_output_____" ], [ "Program metadata helps users to understand how to use your program. 
It includes:\n\n- `name`: Name of the program.\n- `max_execution_time`: Maximum amount of time, in seconds, a program can run before being forcibly terminated.\n- `description`: Describes the program.\n- `spec`: Detailed information about the program, which includes the following attributes:\n - `backend_requirements`: Describes the backend attributes needed to run the program.\n - `parameters`: Describes the program input parameters as a JSON schema\n - `return_values`: Describes the return values as a JSON schema\n - `interim_results`: Describes the interim results as a JSON schema\n\nWhen uploading a program, you must specify at least `name`, `max_execution_time`, and `description`. It is strongly encouraged to also specify `parameters`, `return_values`, and `interim_results` within `spec` if the program has them.", "_____no_output_____" ], [ "Below shows the metadata JSON file of the `sample-program` program as an example:", "_____no_output_____" ] ], [ [ "import os\n\nsample_program_json = os.path.join(os.getcwd(), \"../qiskit_runtime/sample_program/sample_program.json\")\n\nwith open(sample_program_json, 'r') as file:\n data = file.read()\n\nprint(data)", "{\n \"name\": \"sample-program\",\n \"description\": \"A sample runtime program.\",\n \"max_execution_time\": 300,\n \"spec\": {\n \"backend_requirements\": {\n \"min_num_qubits\": 5\n },\n \"parameters\": {\n \"$schema\": \"https://json-schema.org/draft/2019-09/schema\",\n \"properties\": {\n \"iterations\": {\n \"type\": \"integer\",\n \"minimum\": 0,\n \"description\": \"Number of iterations to run. 
Each iteration generates a runs a random circuit.\"\n }\n },\n \"required\": [\n \"iterations\"\n ]\n },\n \"return_values\": {\n \"$schema\": \"https://json-schema.org/draft/2019-09/schema\",\n \"description\": \"A string that says 'All done!'.\",\n \"type\": \"string\"\n },\n \"interim_results\": {\n \"$schema\": \"https://json-schema.org/draft/2019-09/schema\",\n \"properties\": {\n \"iteration\": {\n \"type\": \"integer\",\n \"description\": \"Iteration number.\"\n },\n \"counts\": {\n \"description\": \"Histogram data of the circuit result.\",\n \"type\": \"object\"\n }\n }\n }\n }\n}\n\n" ] ], [ [ "## Uploading a program", "_____no_output_____" ], [ "You can use the [`IBMRuntimeService.upload_program()`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.IBMRuntimeService.html#qiskit.providers.ibmq.runtime.IBMRuntimeService.upload_program) method to upload your program. In the example below, the program data lives in the file `sample_program.py`, and its metadata, as described above, is in `sample_program.json`. ", "_____no_output_____" ] ], [ [ "import os\nfrom qiskit import IBMQ\n\nIBMQ.load_account()\nprovider = IBMQ.get_provider(project='qiskit-runtime') # Substitute with your provider.\n\nsample_program_data = os.path.join(os.getcwd(), \"../qiskit_runtime/sample_program/sample_program.py\")\nsample_program_json = os.path.join(os.getcwd(), \"../qiskit_runtime/sample_program/sample_program.json\")\n\nprogram_id = provider.runtime.upload_program(\n data=sample_program_data,\n metadata=sample_program_json\n)\nprint(program_id)", "sample-program-nQ9dgRjGEe\n" ] ], [ [ "`upload_program()` returns a program ID, which uniquely identifies the program. It is derived from the program name, usually with a randomly-generated suffix. 
Program ID is needed for users to invoke the program", "_____no_output_____" ], [ "## Updating a program", "_____no_output_____" ], [ "You can use the [`IBMRuntimeService.update_program()`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.IBMRuntimeService.update_program.html#qiskit.providers.ibmq.runtime.IBMRuntimeService.update_program) method to update the source code and/or metadata of a program:", "_____no_output_____" ] ], [ [ "provider.runtime.update_program(program_id=program_id, description=\"A new description.\")", "_____no_output_____" ] ], [ [ "This method allows you to make changes to your program while retaining the same program ID.", "_____no_output_____" ], [ "## Deleting a program", "_____no_output_____" ], [ "You can use the [`IBMRuntimeService.delete_program()`](https://qiskit.org/documentation/stubs/qiskit.providers.ibmq.runtime.IBMRuntimeService.html#qiskit.providers.ibmq.runtime.IBMRuntimeService.delete_program) method to delete a program. Only the person who uploaded the program can delete it. \n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ] ]
4aa36ed4d532813ea7e244af626798a097143340
45,263
ipynb
Jupyter Notebook
Tutorial#3- Keras and Tensorflow Introduction.ipynb
PEESEgroup/SysEn-5888
39ae66aea5c8686fa1aed14ec51475d58fea6b28
[ "MIT" ]
1
2022-01-13T18:17:35.000Z
2022-01-13T18:17:35.000Z
Tutorial#3- Keras and Tensorflow Introduction.ipynb
PEESEgroup/SysEn5888
39ae66aea5c8686fa1aed14ec51475d58fea6b28
[ "MIT" ]
null
null
null
Tutorial#3- Keras and Tensorflow Introduction.ipynb
PEESEgroup/SysEn5888
39ae66aea5c8686fa1aed14ec51475d58fea6b28
[ "MIT" ]
null
null
null
66.858198
14,016
0.777036
[ [ [ "# Installing Tensorflow\nWe will creat an environment for tensorflow that will activate every time we use th package\n\n\n### NOTE: it will take some time!", "_____no_output_____" ] ], [ [ "%pip install --upgrade pip\n%pip install tensorflow==2.5.0", "Requirement already satisfied: pip in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (21.1.3)\nNote: you may need to restart the kernel to use updated packages.\nCollecting tensorflow==2.5.0\n Using cached tensorflow-2.5.0-cp37-cp37m-win_amd64.whl (422.6 MB)\nCollecting absl-py~=0.10\n Using cached absl_py-0.13.0-py3-none-any.whl (132 kB)\nRequirement already satisfied: typing-extensions~=3.7.4 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorflow==2.5.0) (3.7.4.2)\nCollecting tensorflow-estimator<2.6.0,>=2.5.0rc0\n Using cached tensorflow_estimator-2.5.0-py2.py3-none-any.whl (462 kB)\nRequirement already satisfied: protobuf>=3.9.2 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorflow==2.5.0) (3.12.3)\nCollecting opt-einsum~=3.3.0\n Using cached opt_einsum-3.3.0-py3-none-any.whl (65 kB)\nRequirement already satisfied: termcolor~=1.1.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorflow==2.5.0) (1.1.0)\nCollecting grpcio~=1.34.0\n Using cached grpcio-1.34.1-cp37-cp37m-win_amd64.whl (2.9 MB)\nCollecting keras-nightly~=2.5.0.dev\n Using cached keras_nightly-2.5.0.dev2021032900-py2.py3-none-any.whl (1.2 MB)\nCollecting tensorboard~=2.5\n Using cached tensorboard-2.5.0-py3-none-any.whl (6.0 MB)\nCollecting h5py~=3.1.0\n Using cached h5py-3.1.0-cp37-cp37m-win_amd64.whl (2.7 MB)\nRequirement already satisfied: wheel~=0.35 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorflow==2.5.0) (0.36.2)\nCollecting flatbuffers~=1.12.0\n Using cached flatbuffers-1.12-py2.py3-none-any.whl (15 kB)\nCollecting gast==0.4.0\n Using cached 
gast-0.4.0-py3-none-any.whl (9.8 kB)\nCollecting astunparse~=1.6.3\n Using cached astunparse-1.6.3-py2.py3-none-any.whl (12 kB)\nCollecting wrapt~=1.12.1\n Using cached wrapt-1.12.1-cp37-cp37m-win_amd64.whl\nRequirement already satisfied: six~=1.15.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorflow==2.5.0) (1.15.0)\nRequirement already satisfied: google-pasta~=0.2 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorflow==2.5.0) (0.2.0)\nRequirement already satisfied: numpy~=1.19.2 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorflow==2.5.0) (1.19.5)\nCollecting keras-preprocessing~=1.1.2\n Using cached Keras_Preprocessing-1.1.2-py2.py3-none-any.whl (42 kB)\nCollecting cached-property\n Using cached cached_property-1.5.2-py2.py3-none-any.whl (7.6 kB)\nRequirement already satisfied: setuptools in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from protobuf>=3.9.2->tensorflow==2.5.0) (49.2.0.post20200714)\nRequirement already satisfied: requests<3,>=2.21.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorboard~=2.5->tensorflow==2.5.0) (2.24.0)\nRequirement already satisfied: markdown>=2.6.8 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorboard~=2.5->tensorflow==2.5.0) (3.1.1)\nRequirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorboard~=2.5->tensorflow==2.5.0) (0.4.1)\nRequirement already satisfied: werkzeug>=0.11.15 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorboard~=2.5->tensorflow==2.5.0) (0.16.1)\nRequirement already satisfied: google-auth<2,>=1.6.3 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorboard~=2.5->tensorflow==2.5.0) 
(1.17.2)\nRequirement already satisfied: tensorboard-plugin-wit>=1.6.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorboard~=2.5->tensorflow==2.5.0) (1.6.0)\nRequirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from tensorboard~=2.5->tensorflow==2.5.0) (0.6.1)\nRequirement already satisfied: cachetools<5.0,>=2.0.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow==2.5.0) (4.2.2)\nRequirement already satisfied: pyasn1-modules>=0.2.1 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow==2.5.0) (0.2.7)\nRequirement already satisfied: rsa<5,>=3.1.4 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow==2.5.0) (4.0)\nRequirement already satisfied: requests-oauthlib>=0.7.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.5->tensorflow==2.5.0) (1.3.0)\nRequirement already satisfied: pyasn1<0.5.0,>=0.4.6 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow==2.5.0) (0.4.8)\nRequirement already satisfied: chardet<4,>=3.0.2 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow==2.5.0) (3.0.4)\nRequirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow==2.5.0) (1.25.9)\nRequirement already satisfied: certifi>=2017.4.17 in 
c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow==2.5.0) (2020.12.5)\nRequirement already satisfied: idna<3,>=2.5 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow==2.5.0) (2.10)\nRequirement already satisfied: oauthlib>=3.0.0 in c:\\users\\asa279\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.5->tensorflow==2.5.0) (3.1.0)\nInstalling collected packages: grpcio, cached-property, absl-py, wrapt, tensorflow-estimator, tensorboard, opt-einsum, keras-preprocessing, keras-nightly, h5py, gast, flatbuffers, astunparse, tensorflow\n Attempting uninstall: grpcio\n Found existing installation: grpcio 1.27.2\n Uninstalling grpcio-1.27.2:\n Successfully uninstalled grpcio-1.27.2\n Attempting uninstall: absl-py\n Found existing installation: absl-py 0.9.0\n Uninstalling absl-py-0.9.0:\n Successfully uninstalled absl-py-0.9.0\n Attempting uninstall: wrapt\n Found existing installation: wrapt 1.11.2\n Uninstalling wrapt-1.11.2:\n Successfully uninstalled wrapt-1.11.2\n Attempting uninstall: tensorflow-estimator\n Found existing installation: tensorflow-estimator 2.0.1\n Uninstalling tensorflow-estimator-2.0.1:\n Successfully uninstalled tensorflow-estimator-2.0.1\n Attempting uninstall: tensorboard\n Found existing installation: tensorboard 2.0.2\n Uninstalling tensorboard-2.0.2:\n Successfully uninstalled tensorboard-2.0.2\n Attempting uninstall: opt-einsum\n Found existing installation: opt-einsum 3.1.0\n Uninstalling opt-einsum-3.1.0:\n Successfully uninstalled opt-einsum-3.1.0\n Attempting uninstall: keras-preprocessing\n Found existing installation: Keras-Preprocessing 1.1.0\n Uninstalling Keras-Preprocessing-1.1.0:\n Successfully uninstalled Keras-Preprocessing-1.1.0\n Attempting uninstall: h5py\n Found existing 
installation: h5py 2.10.0\n Uninstalling h5py-2.10.0:\n Successfully uninstalled h5py-2.10.0\n Attempting uninstall: gast\n Found existing installation: gast 0.2.2\n Uninstalling gast-0.2.2:\n Successfully uninstalled gast-0.2.2\n Attempting uninstall: tensorflow\n Found existing installation: tensorflow 2.0.0\n Uninstalling tensorflow-2.0.0:\n Successfully uninstalled tensorflow-2.0.0\nSuccessfully installed absl-py-0.13.0 astunparse-1.6.3 cached-property-1.5.2 flatbuffers-1.12 gast-0.4.0 grpcio-1.34.1 h5py-3.1.0 keras-nightly-2.5.0.dev2021032900 keras-preprocessing-1.1.2 opt-einsum-3.3.0 tensorboard-2.5.0 tensorflow-2.5.0 tensorflow-estimator-2.5.0 wrapt-1.12.1\nNote: you may need to restart the kernel to use updated packages.\n" ] ], [ [ "#### If you see the message below, restart the kernel please from the panel above (Kernels>restart)! \n\n 'Note: you may need to restart the kernel to use updated packages.'\n\n#### Let's check if you have everything!", "_____no_output_____" ] ], [ [ "import tensorflow as tf\nprint(tf.__version__)", "2.5.0\n" ], [ "reachout='Please repeat the steps above. If it still does not work, reach out to me ([email protected])'\ntry:\n import tensorflow\n print('tensorflow is all good!')\nexcept:\n print(\"An exception occurred in tensorflow installation.\"+reachout)\ntry:\n import keras\n print('keras is all good!')\nexcept:\n print(\"An exception occurred in keras installation.\"+reachout)", "tensorflow is all good!\nAn exception occurred in keras installation.Please repeat the steps above. If it still does not work, reach out to me ([email protected])\n" ] ], [ [ "### Now let's explore tensorflow!\n\nFrom its name tensorflow stores constants as tensor objects! 
Let's create our first constant!", "_____no_output_____" ] ], [ [ "import tensorflow as tf\nimport tensorflow.compat.v1 as tf\ntf.disable_v2_behavior() \nmyfirstconst = tf.constant('Hello World')\nmyfirstconst", "WARNING:tensorflow:From C:\\Users\\asa279\\AppData\\Local\\Continuum\\anaconda3\\lib\\site-packages\\tensorflow\\python\\compat\\v2_compat.py:96: disable_resource_variables (from tensorflow.python.ops.variable_scope) is deprecated and will be removed in a future version.\nInstructions for updating:\nnon-resource variables are not supported in the long term\n" ], [ "x = tf.constant(130.272)\nx", "_____no_output_____" ] ], [ [ "### TF Sessions\n\nLet's create a TensorFlow Session. It can be thought of as a class for running TensorFlow operations. The session encapsulates the environment in which operations take place.\nLet's do a quick example:", "_____no_output_____" ] ], [ [ "a = tf.constant(1)\nb = tf.constant(5)\nwith tf.Session() as Session:\n print('TF simple Operations')\n print('Multiply',Session.run(a*b))\n print('Divide',Session.run(a/b))\n print('Add',Session.run(a+b))\n print('Subtract',Session.run(b-a))", "TF simple Operations\nMultiply 5\nDivide 0.2\nAdd 6\nSubtract 4\n" ] ], [ [ "#### Now let's multiply a matrix ", "_____no_output_____" ] ], [ [ "import numpy as np\nm = np.array([[1.0,2.0]])\nn = np.array([[3.0],[4.0]])\nmulti = tf.matmul(m,n)\nmulti", "_____no_output_____" ], [ "with tf.Session() as Session:\n res = Session.run(multi)\n print(res)", "[[11.]]\n" ] ], [ [ "### TF Variables\n\nSometimes you want to define a variable rsulting from operations. 
**tf.variable is ideal for this case!**\n\n\nLet's see how to use it!", "_____no_output_____" ] ], [ [ "#We have to start a session!\nsess = tf.InteractiveSession()\n\natensor = tf.random_uniform((2,2),0,1)\natensor", "_____no_output_____" ], [ "var = tf.Variable(initial_value=atensor)\nvar", "_____no_output_____" ], [ "try:\n with tf.Session() as Session:\n res = Session.run(var)\n print(res)\nexcept:\n print(\"error!\")", "error!\n" ], [ "initialize = tf.global_variables_initializer()\ninitialize.run()\nvar.eval()", "_____no_output_____" ], [ "sess.run(var)", "_____no_output_____" ] ], [ [ "## Now let's custom build our first neural networks!", "_____no_output_____" ] ], [ [ "xd = np.linspace(0,10,100) + np.random.uniform(-3,.5,100)\nyd = np.linspace(0,10,100) + np.random.uniform(-.5,2,100)", "_____no_output_____" ], [ " import matplotlib.pyplot as plt\nplt.plot(xd,yd,'o')", "_____no_output_____" ] ], [ [ "### Let's define our variables here\n\n$y=m*x+b$", "_____no_output_____" ] ], [ [ "#Let's intialize with a guess\nm = tf.Variable(1.0)\nb = tf.Variable(0.1)", "_____no_output_____" ], [ "#Let's build or objective function!\n#initialize error\ne=0\nfor x,y in zip(xd,yd):\n #our model\n y_pred = m*x + b\n # our error\n e += (y-y_pred)**2\n\n## tensorflow optimizer \noptimizer = tf.train.GradientDescentOptimizer(learning_rate=0.0001)\n\n## we want to minimize error\ntraining = optimizer.minimize(e)\n\n## initilize our variables with tensorflow\ninitalize = tf.global_variables_initializer()\n\n\n#start the session for 1000 epochs!\nwith tf.Session() as sess:\n \n sess.run(initalize)\n epochs = 100\n \n for i in range(epochs):\n \n sess.run(training)\n \n\n # Get results\n \n mf, bf = sess.run([m,b])\n", "_____no_output_____" ], [ "print(\"The slope is {} and the intercept is {}\".format(mf, bf))", "The slope is 1.0789515972137451 and the intercept is 1.1301825046539307\n" ], [ "#Let's evalute our results\nx_v = np.linspace(-3,11,300)\ny_v = mf*x_v + 
bf\nplt.plot(x_v,y_v,'r')\nplt.plot(xd,yd,'o')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
4aa3a310208a3ae39a3b660508b27ee92c17cce4
35,640
ipynb
Jupyter Notebook
deep_learning_v2_pytorch/intro-to-pytorch/Part 4 - Fashion-MNIST (Solution).ipynb
TeoZosa/deep-learning-v2-pytorch
8e73c26f2ebf49769b798e9ff26bd90d7de69f7d
[ "Apache-2.0" ]
null
null
null
deep_learning_v2_pytorch/intro-to-pytorch/Part 4 - Fashion-MNIST (Solution).ipynb
TeoZosa/deep-learning-v2-pytorch
8e73c26f2ebf49769b798e9ff26bd90d7de69f7d
[ "Apache-2.0" ]
159
2021-05-07T21:34:19.000Z
2022-03-28T13:33:29.000Z
deep_learning_v2_pytorch/intro-to-pytorch/Part 4 - Fashion-MNIST (Solution).ipynb
TeoZosa/deep-learning-v2-pytorch
8e73c26f2ebf49769b798e9ff26bd90d7de69f7d
[ "Apache-2.0" ]
null
null
null
143.709677
24,056
0.883586
[ [ [ "# Classifying Fashion-MNIST\n\nNow it's your turn to build and train a neural network. You'll be using the [Fashion-MNIST dataset](https://github.com/zalandoresearch/fashion-mnist), a drop-in replacement for the MNIST dataset. MNIST is actually quite trivial with neural networks where you can easily achieve better than 97% accuracy. Fashion-MNIST is a set of 28x28 greyscale images of clothes. It's more complex than MNIST, so it's a better representation of the actual performance of your network, and a better representation of datasets you'll use in the real world.\n\n<img src='assets/fashion-mnist-sprite.png' width=500px>\n\nIn this notebook, you'll build your own neural network. For the most part, you could just copy and paste the code from Part 3, but you wouldn't be learning. It's important for you to write the code yourself and get it to work. Feel free to consult the previous notebooks though as you work through this.\n\nFirst off, let's load the dataset through torchvision.", "_____no_output_____" ] ], [ [ "import torch\nfrom torchvision import datasets, transforms\nimport helper\n\n# Define a transform to normalize the data\ntransform = transforms.Compose(\n [transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))]\n)\n# Download and load the training data\ntrainset = datasets.FashionMNIST(\n \"~/.pytorch/F_MNIST_data/\", download=True, train=True, transform=transform\n)\ntrainloader = torch.utils.data.DataLoader(trainset, batch_size=64, shuffle=True)\n\n# Download and load the test data\ntestset = datasets.FashionMNIST(\n \"~/.pytorch/F_MNIST_data/\", download=True, train=False, transform=transform\n)\ntestloader = torch.utils.data.DataLoader(testset, batch_size=64, shuffle=True)", "_____no_output_____" ] ], [ [ "Here we can see one of the images.", "_____no_output_____" ] ], [ [ "image, label = next(iter(trainloader))\nhelper.imshow(image[0, :]);", "_____no_output_____" ] ], [ [ "## Building the network\n\nHere you should define your network. 
As with MNIST, each image is 28x28 which is a total of 784 pixels, and there are 10 classes. You should include at least one hidden layer. We suggest you use ReLU activations for the layers and to return the logits or log-softmax from the forward pass. It's up to you how many layers you add and the size of those layers.", "_____no_output_____" ] ], [ [ "from torch import nn, optim\nimport torch.nn.functional as F", "_____no_output_____" ], [ "# TODO: Define your network architecture here\nclass Classifier(nn.Module):\n def __init__(self):\n super().__init__()\n self.fc1 = nn.Linear(784, 256)\n self.fc2 = nn.Linear(256, 128)\n self.fc3 = nn.Linear(128, 64)\n self.fc4 = nn.Linear(64, 10)\n\n def forward(self, x):\n # make sure input tensor is flattened\n x = x.view(x.shape[0], -1)\n\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n x = F.relu(self.fc3(x))\n x = F.log_softmax(self.fc4(x), dim=1)\n\n return x", "_____no_output_____" ] ], [ [ "# Train the network\n\nNow you should create your network and train it. First you'll want to define [the criterion](http://pytorch.org/docs/master/nn.html#loss-functions) (something like `nn.CrossEntropyLoss` or `nn.NLLLoss`) and [the optimizer](http://pytorch.org/docs/master/optim.html) (typically `optim.SGD` or `optim.Adam`).\n\nThen write the training code. 
Remember the training pass is a fairly straightforward process:\n\n* Make a forward pass through the network to get the logits \n* Use the logits to calculate the loss\n* Perform a backward pass through the network with `loss.backward()` to calculate the gradients\n* Take a step with the optimizer to update the weights\n\nBy adjusting the hyperparameters (hidden units, learning rate, etc), you should be able to get the training loss below 0.4.", "_____no_output_____" ] ], [ [ "# TODO: Create the network, define the criterion and optimizer\nmodel = Classifier()\ncriterion = nn.NLLLoss()\noptimizer = optim.Adam(model.parameters(), lr=0.003)", "_____no_output_____" ], [ "# TODO: Train the network here\nepochs = 5\n\nfor e in range(epochs):\n running_loss = 0\n for images, labels in trainloader:\n log_ps = model(images)\n loss = criterion(log_ps, labels)\n\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n running_loss += loss.item()\n else:\n print(f\"Training loss: {running_loss/len(trainloader)}\")", "Training loss: 283.4510831311345\nTraining loss: 274.7842669263482\nTraining loss: 267.907463490963\nTraining loss: 258.2156918346882\nTraining loss: 251.79347000271082\n" ], [ "%matplotlib inline\n%config InlineBackend.figure_format = 'retina'\n\nimport helper\n\n# Test out your network!\n\ndataiter = iter(testloader)\nimages, labels = dataiter.next()\nimg = images[1]\n\n# TODO: Calculate the class probabilities (softmax) for img\nps = torch.exp(model(img))\n\n# Plot the image and probabilities\nhelper.view_classify(img, ps, version=\"Fashion\")", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
4aa3bdc0c5df7f1bea3abf79542fa142a111e846
12,121
ipynb
Jupyter Notebook
2018-03-14_UCSF_workshop/notebooks/2018-03-14_09_CCMI_UCSF_Classification_and_Prediction - RNAseq.ipynb
genepattern/tutorial-materials
2ab7978fd46343274de999d96e6a65e568dd129f
[ "BSD-3-Clause" ]
null
null
null
2018-03-14_UCSF_workshop/notebooks/2018-03-14_09_CCMI_UCSF_Classification_and_Prediction - RNAseq.ipynb
genepattern/tutorial-materials
2ab7978fd46343274de999d96e6a65e568dd129f
[ "BSD-3-Clause" ]
1
2019-05-03T18:53:07.000Z
2019-05-03T18:54:59.000Z
2018-03-14_UCSF_workshop/notebooks/2018-03-14_09_CCMI_UCSF_Classification_and_Prediction - RNAseq.ipynb
genepattern/tutorial-materials
2ab7978fd46343274de999d96e6a65e568dd129f
[ "BSD-3-Clause" ]
null
null
null
39.226537
356
0.624123
[ [ [ "# Classification and Prediction in GenePattern Notebook\n\nThis notebook will show you how to use k-Nearest Neighbors (kNN) to build a predictor, use it to classify leukemia subtypes, and assess its accuracy in cross-validation.", "_____no_output_____" ], [ "### K-nearest-neighbors (KNN)\nKNN classifies an unknown sample by assigning it the phenotype label most frequently represented among the k nearest known samples. \n\nAdditionally, you can select a weighting factor for the 'votes' of the nearest neighbors. For example, one might weight the votes by the reciprocal of the distance between neighbors to give closer neighors a greater vote.", "_____no_output_____" ], [ "<h2>1. Log in to GenePattern</h2>\n\n<ul>\n\t<li>Select Broad Institute as the server</li>\n\t<li>Enter your username and password.</li>\n\t<li>Click <em>Login to GenePattern</em>.</li>\n\t<li>When you are logged in, you can click the - button in the upper right hand corner to collapse the cell.</li>\n\t<li>Alternatively, if you are prompted to Login as your username, just click that button and give it a couple seconds to authenticate.</li>\n</ul>\n", "_____no_output_____" ] ], [ [ "# Requires GenePattern Notebook: pip install genepattern-notebook\nimport gp\nimport genepattern\n\n# Username and password removed for security reasons.\ngenepattern.GPAuthWidget(genepattern.register_session(\"https://gp-beta-ami.genepattern.org/gp\", \"\", \"\"))", "_____no_output_____" ] ], [ [ "## 2. 
Run k-Nearest Neighbors Cross Validation", "_____no_output_____" ], [ "<div class=\"alert alert-info\">\n- Drag [BRCA_HUGO_symbols.preprocessed.gct](https://datasets.genepattern.org/data/ccmi_tutorial/2017-12-15/BRCA_HUGO_symbols.preprocessed.gct) to the **data filename** field below.\n- Drag [BRCA_HUGO_symbols.preprocessed.cls](https://datasets.genepattern.org/data/ccmi_tutorial/2017-12-15/BRCA_HUGO_symbols.preprocessed.cls) to the **class filename** field.\n- Click **Run**.", "_____no_output_____" ] ], [ [ "knnxvalidation_task = gp.GPTask(genepattern.get_session(0), 'urn:lsid:broad.mit.edu:cancer.software.genepattern.module.analysis:00013')\nknnxvalidation_job_spec = knnxvalidation_task.make_job_spec()\nknnxvalidation_job_spec.set_parameter(\"data.filename\", \"\")\nknnxvalidation_job_spec.set_parameter(\"class.filename\", \"\")\nknnxvalidation_job_spec.set_parameter(\"num.features\", \"10\")\nknnxvalidation_job_spec.set_parameter(\"feature.selection.statistic\", \"0\")\nknnxvalidation_job_spec.set_parameter(\"min.std\", \"\")\nknnxvalidation_job_spec.set_parameter(\"num.neighbors\", \"3\")\nknnxvalidation_job_spec.set_parameter(\"weighting.type\", \"1\")\nknnxvalidation_job_spec.set_parameter(\"distance.measure\", \"1\")\nknnxvalidation_job_spec.set_parameter(\"pred.results.file\", \"<data.filename_basename>.pred.odf\")\nknnxvalidation_job_spec.set_parameter(\"feature.summary.file\", \"<data.filename_basename>.feat.odf\")\ngenepattern.GPTaskWidget(knnxvalidation_task)", "_____no_output_____" ] ], [ [ "## 3. 
View a list of features used in the prediction model", "_____no_output_____" ], [ "<div class=\"alert alert-info\">\n- Select the XXXXXX.KNNXvalidation job result cell by clicking anywhere in it.\n- Click on the i icon next to the `<filename>.**feat**.odf` file\n- Select \"Send to DataFrame\"\n- You will see a new cell created below the job result cell.\n- Execute this cell.\n- You will see a table of features, descriptions, and the number of times each feature was included in a model in a cross-validation loop.", "_____no_output_____" ], [ "## 4. View prediction results", "_____no_output_____" ], [ "<div class=\"alert alert-info\">\n- For the **prediction results file** parameter below, click the down arrow in the file input box.\n- Select the `BRCA_HUGO_symbols.preprocessed.pred.odf` file.\n- Click **Run**.\n- You will see the prediction results in an interactive viewer.", "_____no_output_____" ] ], [ [ "predictionresultsviewer_task = gp.GPTask(genepattern.get_session(0), 'urn:lsid:broad.mit.edu:cancer.software.genepattern.module.visualizer:00019')\npredictionresultsviewer_job_spec = predictionresultsviewer_task.make_job_spec()\npredictionresultsviewer_job_spec.set_parameter(\"prediction.results.file\", \"\")\ngenepattern.GPTaskWidget(predictionresultsviewer_task)", "_____no_output_____" ] ], [ [ "## References\n\nBreiman, L., Friedman, J. H., Olshen, R. A., & Stone, C. J. 1984. [Classification and regression trees](https://www.amazon.com/Classification-Regression-Wadsworth-Statistics-Probability/dp/0412048418?ie=UTF8&*Version*=1&*entries*=0). Wadsworth & Brooks/Cole Advanced Books & Software, Monterey, CA.\n\nGolub, T.R., Slonim, D.K., Tamayo, P., Huard, C., Gaasenbeek, M., Mesirov, J.P., Coller, H., Loh, M., Downing, J.R., Caligiuri, M.A., Bloomfield, C.D., and Lander, E.S. 1999. Molecular Classification of Cancer: Class Discovery and Class Prediction by Gene Expression. 
[Science 286:531-537](http://science.sciencemag.org/content/286/5439/531.long).\n\nLu, J., Getz, G., Miska, E.A., Alvarez-Saavedra, E., Lamb, J., Peck, D., Sweet-Cordero, A., Ebert, B.L., Mak, R.H., Ferrando, A.A, Downing, J.R., Jacks, T., Horvitz, H.R., Golub, T.R. 2005. MicroRNA expression profiles classify human cancers. [Nature 435:834-838](http://www.nature.com/nature/journal/v435/n7043/full/nature03702.html).\n\nRifkin, R., Mukherjee, S., Tamayo, P., Ramaswamy, S., Yeang, C-H, Angelo, M., Reich, M., Poggio, T., Lander, E.S., Golub, T.R., Mesirov, J.P. 2003. An Analytical Method for Multiclass Molecular Cancer Classification. [SIAM Review 45(4):706-723](http://epubs.siam.org/doi/abs/10.1137/S0036144502411986).\n\nSlonim, D.K., Tamayo, P., Mesirov, J.P., Golub, T.R., Lander, E.S. 2000. Class prediction and discovery using gene expression data. In [Proceedings of the Fourth Annual International Conference on Computational Molecular Biology (RECOMB)](http://dl.acm.org/citation.cfm?id=332564). ACM Press, New York. pp. 263-272.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ] ]
4aa3c262b74cc81eff01e4c502ff8257c6da92f8
3,795
ipynb
Jupyter Notebook
data/sunda_kuno/.ipynb_checkpoints/Untitled-checkpoint.ipynb
Satriosadrakha/CapsNet-Tensorflow-Sunda-Kuno
87673b2c6a146672fcbbe2e5c0382c2b8d4acaaf
[ "Apache-2.0" ]
null
null
null
data/sunda_kuno/.ipynb_checkpoints/Untitled-checkpoint.ipynb
Satriosadrakha/CapsNet-Tensorflow-Sunda-Kuno
87673b2c6a146672fcbbe2e5c0382c2b8d4acaaf
[ "Apache-2.0" ]
null
null
null
data/sunda_kuno/.ipynb_checkpoints/Untitled-checkpoint.ipynb
Satriosadrakha/CapsNet-Tensorflow-Sunda-Kuno
87673b2c6a146672fcbbe2e5c0382c2b8d4acaaf
[ "Apache-2.0" ]
null
null
null
34.189189
252
0.517523
[ [ [ "import os\nimport cv2 as cv\nimport numpy as np\nfrom PIL import Image, ImageFilter, ImageEnhance\nimport PIL.ImageOps\nimport os\nimport glob\nfrom sklearn.model_selection import train_test_split\n\ndef get_sunda_ran(aksara,subtract=0):\n script_dir = os.path.abspath('')\n i=[\"A\",\"BA\",\"CA\",\"DA\",\"GA\",\"HA\",\"I\",\"JA\",\"KA\",\"LA\",\"MA\",\"NA\",\"NGA\",\"NYA\",\"PA\",\"PANELENG\",\"PANEULEUNG\",\"PANGHULU\",\"PANGLAYAR\",\"PANOLONG\",\"PANYUKU\",\"PATEN\",\"RA\",\"SA\",\"TA\",\"U\",\"WA\",\"YA\"]\n j=[30,47,19,67,37,27,16,21,60,60,56,120,25,14,61,42,63,60,23,35,36,84,56,90,78,18,24,22]\n k=[12,30,7,45,16,11,7,9,40,25,24,80,10,6,39,28,42,40,10,24,24,36,36,60,52,7,10,9]\n ganed = [0,2,4,5,6,7,12,13,18,19,20,25,26,27]\n\n final_np = np.array([])\n final_label = np.array([])\n\n # train_image to ready_to_train\n#for x in range(0, len(i), 1):\n sumImg = 0\n directory_path = os.path.join(script_dir,\"train-test_image\")\n count_image = len(glob.glob1(directory_path,\"%s_*.png\" % (i[aksara])))\n\n for y in range(1, count_image+1):\n abs_file_path = os.path.join(directory_path, \"%s_%s.png\" % (i[aksara],str(y)))\n img = Image.open(abs_file_path)\n img = np.array(img)\n img = img[:, :]\n final_np = np.append(final_np,img)\n final_label = np.append(final_label,aksara)\n sumImg = sumImg + count_image\n final_np = final_np.reshape((sumImg, 28, 28, 1)).astype(np.float32)\n final_label = final_label.reshape((sumImg)).astype(np.int32)\n print(final_np.shape)\n idx = np.random.choice(sumImg,size=sumImg-subtract,replace=False)\n print(idx)\n print(final_np[idx,:].shape)\n \n data_train, data_test, labels_train, labels_test = train_test_split(final_np[idx,:], final_label[idx,:], test_size=0.30, random_state=42)\n# return(data_train,sumImg)\n# print(final_label)\n# print(data_train.shape)\n# print(data_test.shape)\n# print(labels_train)\n# print(labels_test)def get_sunda(aksara):\n", "_____no_output_____" ], [ "get_sunda_ran(3,22)", "(112, 28, 28, 1)\n[ 83 103 
57 41 111 89 64 37 13 25 108 53 79 4 93 72 7 21\n 66 22 87 28 32 61 55 63 58 31 65 74 102 42 26 71 59 90\n 11 1 27 94 0 8 97 81 2 110 39 49 106 33 95 98 100 96\n 86 78 36 34 73 12 92 85 101 88 82 105 30 17 56 38 45 43\n 68 76 51 44 91 52 77 9 6 19 107 62 46 104 35 67 75 16]\n(90, 28, 28, 1)\n" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]
4aa3c2aa6a371bc61173e27af51716bd4c0df364
3,016
ipynb
Jupyter Notebook
worksheet-template/worksheet-template.ipynb
joshcarp/CodeCamp
af13c19b768ec425f56b55e7f7c3ef70abfa3b67
[ "MIT" ]
1
2019-06-30T14:55:44.000Z
2019-06-30T14:55:44.000Z
worksheet-template/worksheet-template.ipynb
Joshcarp/CodeCamp
af13c19b768ec425f56b55e7f7c3ef70abfa3b67
[ "MIT" ]
6
2019-07-13T06:29:54.000Z
2019-08-12T23:52:51.000Z
worksheet-template/worksheet-template.ipynb
joshcarp/CodeCamp
af13c19b768ec425f56b55e7f7c3ef70abfa3b67
[ "MIT" ]
null
null
null
24.128
181
0.532162
[ [ [ "## Worksheet ##: Python Topic #\nBelow the title, a little bit of *theory* and `code` will be shown for the **benefit** of the reader.\n\n### Another Topic #\nLike this perhaps.\n\n#### Or Another #\nCan be shown with more or less hash symbols.\n\nThen a horizontal rule can be used to split the text up.\n***\n\nDot points may be used to\n- Briefly\n- Outline\n- The\n- Ideas\n\nOr perhaps a list to\n1. Demonstrate\n2. Some\n3. Logic\n\n***\nText may be quoted,\n> like such\n>> to show some\n> structure\n\nAnd math can be explored equally so, $ a + b^2 = c$\n\nBut if all else fails, link them to some [sources](http://google.com)", "_____no_output_____" ], [ "### Challenge 1 #\nTime for the student to write some code. Describe the problem and provide some `hints and helper code`. Here, let's create a simple function to calculate the area of a square.", "_____no_output_____" ] ], [ [ "# Important packages\nfrom math import pi\n\n# Create a framework for the function\ndef find_square_area(width):\n \"\"\"Return the area of a square.\"\"\"\n pass\n\ndef find_circle_area(radius):\n \"\"\"Return the area of a circle.\"\"\"\n pass", "_____no_output_____" ], [ "# Import unit_test from local relative directory. This will be streamlined later.\n\n# Add unit-test folder to project paths\nimport sys\nif r'..\\unit-test' not in sys.path:\n sys.path.insert(0, r'..\\unit-test')\n\nfrom verifier import Verifier", "_____no_output_____" ], [ "# Test the functions\nVerifier('../unit-test/test_verifier.py')\n# When creating Verifier files, kernel must be restarted for new file to be checked. Press '0' twice as shortcut.", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code" ] ]
4aa3e2fca1f7619fd37dec7c06cc4570121489bb
3,956
ipynb
Jupyter Notebook
docs/_src/5.Development/2022-01-15-saving-and-loading-metagraphs.ipynb
snystrom/Mycelia
67a978ec2de3c53fced46b98adbdfa2c4ca82889
[ "MIT" ]
null
null
null
docs/_src/5.Development/2022-01-15-saving-and-loading-metagraphs.ipynb
snystrom/Mycelia
67a978ec2de3c53fced46b98adbdfa2c4ca82889
[ "MIT" ]
27
2021-06-24T17:53:36.000Z
2022-03-05T19:26:01.000Z
docs/_src/5.Development/2022-01-15-saving-and-loading-metagraphs.ipynb
snystrom/Mycelia
67a978ec2de3c53fced46b98adbdfa2c4ca82889
[ "MIT" ]
1
2022-01-08T14:45:20.000Z
2022-01-08T14:45:20.000Z
21.5
120
0.497978
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
4aa3eb3b83abae31469686b8d992f74c30909717
21,187
ipynb
Jupyter Notebook
notebooks/D01_Time_Series_1.ipynb
cliburn/bios-823-2021
2a8e8f38b60c947adaeafdcee15f5396a0d01b52
[ "MIT" ]
7
2021-07-15T02:20:30.000Z
2022-01-17T20:55:29.000Z
notebooks/D01_Time_Series_1.ipynb
cliburn/bios-823-2021
2a8e8f38b60c947adaeafdcee15f5396a0d01b52
[ "MIT" ]
null
null
null
notebooks/D01_Time_Series_1.ipynb
cliburn/bios-823-2021
2a8e8f38b60c947adaeafdcee15f5396a0d01b52
[ "MIT" ]
15
2021-08-23T16:29:44.000Z
2022-02-10T17:28:29.000Z
19.727188
265
0.512862
[ [ [ "# Time Series Analysis 1\n\nIn the first lecture, we are mainly concerned with how to manipulate and smooth time series data.", "_____no_output_____" ] ], [ [ "%matplotlib inline\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "import os\nimport time", "_____no_output_____" ], [ "import numpy as np\nimport pandas as pd", "_____no_output_____" ], [ "! python3 -m pip install --quiet gmaps", "_____no_output_____" ], [ "import gmaps\nimport gmaps.datasets", "_____no_output_____" ] ], [ [ "## Dates and times", "_____no_output_____" ], [ "### Timestamps", "_____no_output_____" ] ], [ [ "now = pd.to_datetime('now')", "_____no_output_____" ], [ "now", "_____no_output_____" ], [ "now.year, now.month, now.week, now.day, now.hour, now.minute, now.second, now.microsecond", "_____no_output_____" ], [ "now.month_name(), now.day_name()", "_____no_output_____" ] ], [ [ "### Formatting timestamps\n\nSee format [codes](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior)", "_____no_output_____" ] ], [ [ "now.strftime('%I:%m%p %d-%b-%Y')", "_____no_output_____" ] ], [ [ "### Parsing time strings", "_____no_output_____" ], [ "#### `pandas` can handle standard formats", "_____no_output_____" ] ], [ [ "ts = pd.to_datetime('6-Dec-2018 4:45 PM')", "_____no_output_____" ], [ "ts", "_____no_output_____" ] ], [ [ "#### For unusual formats, use `strptime`", "_____no_output_____" ] ], [ [ "from datetime import datetime ", "_____no_output_____" ], [ "ts = datetime.strptime('10:11PM 02-Nov-2018', '%I:%m%p %d-%b-%Y')", "_____no_output_____" ], [ "ts", "_____no_output_____" ] ], [ [ "### Intervals", "_____no_output_____" ] ], [ [ "then = pd.to_datetime('now')\ntime.sleep(5)\nnow = pd.to_datetime('now')", "_____no_output_____" ], [ "now - then", "_____no_output_____" ] ], [ [ "### Date ranges\n\nA date range is just a collection of time stamps.", "_____no_output_____" ] ], [ [ "dates = pd.date_range(then, now, freq='s')", "_____no_output_____" ], [ 
"dates", "_____no_output_____" ], [ "(then - pd.to_timedelta('1.5s')) in dates", "_____no_output_____" ] ], [ [ "### Periods\n\nPeriods are intervals, not a collection of timestamps.", "_____no_output_____" ] ], [ [ "span = dates.to_period()", "_____no_output_____" ], [ "span", "_____no_output_____" ], [ "(then + pd.to_timedelta('1.5s')) in span", "_____no_output_____" ] ], [ [ "## Lag and lead with `shift`\n\nWe will use a periodic time series as an example. Periodicity is important because many biological phenomena are linked to natural periods (seasons, diurnal, menstrual cycle) or are intrinsically periodic (e.g. EEG, EKG measurements).", "_____no_output_____" ] ], [ [ "index = pd.date_range('1-1-2018', '31-1-2018', freq='12h')", "_____no_output_____" ] ], [ [ "You can shift by periods or by frequency. Shifting by frequency maintains boundary data.", "_____no_output_____" ] ], [ [ "wave = pd.Series(np.sin(np.arange(len(index))), index=index)", "_____no_output_____" ], [ "wave.shift(periods=1).head(3)", "_____no_output_____" ], [ "wave.shift(periods=1).tail(3)", "_____no_output_____" ], [ "wave.shift(freq=pd.Timedelta(1, freq='D')).head(3)", "_____no_output_____" ], [ "wave.shift(freq=pd.Timedelta(1, freq='D')).tail(3)", "_____no_output_____" ] ], [ [ "#### Visualizing shifts", "_____no_output_____" ] ], [ [ "wave.plot()\npass", "_____no_output_____" ], [ "wave.plot(c='blue')\nwave.shift(-1).plot(c='red')\npass", "_____no_output_____" ], [ "wave.plot(c='blue')\nwave.shift(1).plot(c='red')\npass", "_____no_output_____" ], [ "(wave - wave.shift(-6)).plot(c='blue')\n(wave - wave.shift(-3)).plot(c='red')\npass", "_____no_output_____" ] ], [ [ "Embedding the time series with its lagged version reveals its periodic nature.", "_____no_output_____" ] ], [ [ "plt.scatter(wave, wave.shift(-1))\npass", "_____no_output_____" ] ], [ [ "### Find percent change from previous period", "_____no_output_____" ] ], [ [ "wave.pct_change().head()", "_____no_output_____" ] ], [ [ 
"`pct_change` is just a convenience wrapper around the use of `shift`", "_____no_output_____" ] ], [ [ "((wave - wave.shift(-1, freq='12h'))/wave).head()", "_____no_output_____" ] ], [ [ "## Resampling and window functions\n\n\nThe `resample` and window method have the same syntax as `groupby`, in that you can apply an aggregate function to the new intervals.", "_____no_output_____" ], [ "### Resampling\n\nSometimes there is a need to generate new time intervals, for example, to regularize irregularly timed observations.", "_____no_output_____" ], [ "#### Down-sampling", "_____no_output_____" ] ], [ [ "index = pd.date_range(pd.to_datetime('1-1-2018'), periods=365, freq='d')", "_____no_output_____" ], [ "series = pd.Series(np.arange(len(index)), index=index)", "_____no_output_____" ], [ "series.head()", "_____no_output_____" ], [ "sereis_weekly_average = series.resample('w').mean()\nsereis_weekly_average.head()", "_____no_output_____" ], [ "sereis_monthly_sum = series.resample('m').sum()\nsereis_monthly_sum.head()", "_____no_output_____" ], [ "sereis_10day_median = series.resample('10d').median()\nsereis_10day_median.head()", "_____no_output_____" ] ], [ [ "#### Up-sampling\n\nFor up-sampling, we need to figure out what we want to do with the missing values. The usual choices are forward fill, backward fill, or interpolation using one of many built-in methods.", "_____no_output_____" ] ], [ [ "upsampled = series.resample('12h')", "_____no_output_____" ], [ "upsampled.asfreq()[:5]", "_____no_output_____" ], [ "upsampled.ffill().head()", "_____no_output_____" ], [ "upsampled.bfill().head()", "_____no_output_____" ], [ "upsampled.interpolate('linear').head()", "_____no_output_____" ] ], [ [ "### Window functions\n\nWindow functions are typically used to smooth time series data. There are 3 variants - rolling, expanding and exponentially weighted. 
We use the Nile flooding data for these examples.", "_____no_output_____" ] ], [ [ "df = pd.read_csv('data/nile.csv', index_col=0)", "_____no_output_____" ], [ "df.head()", "_____no_output_____" ], [ "df.plot()\npass", "_____no_output_____" ] ], [ [ "#### Rolling windows generate windows of a specified width", "_____no_output_____" ] ], [ [ "ts = pd.DataFrame(dict(ts=np.arange(5)))\nts['rolling'] = ts.rolling(window=3).sum()\nts", "_____no_output_____" ], [ "rolling10 = df.rolling(window=10)\nrolling100 = df.rolling(window=100)", "_____no_output_____" ], [ "df.plot()\nplt.plot(rolling10.mean(), c='orange')\nplt.plot(rolling100.mean(), c='red')\npass", "_____no_output_____" ] ], [ [ "#### Expanding windows grow as the time series progresses", "_____no_output_____" ] ], [ [ "ts['expanding'] = ts.ts.expanding().sum()\nts", "_____no_output_____" ], [ "df.plot()\nplt.plot(df.expanding(center=True).mean(), c='orange')\nplt.plot(df.expanding().mean(), c='red')\npass", "_____no_output_____" ] ], [ [ "#### Exponentially weighted windows place more weight on center of mass", "_____no_output_____" ] ], [ [ "n = 10\nxs = np.arange(n, dtype='float')[::-1]\nxs", "_____no_output_____" ] ], [ [ "Exponentially weighted windows without adjustment.", "_____no_output_____" ] ], [ [ "pd.Series(xs).ewm(alpha=0.8, adjust=False).mean()", "_____no_output_____" ] ], [ [ "Re-implementation for insight.", "_____no_output_____" ] ], [ [ "α = 0.8\nys = np.zeros_like(xs)\nys[0] = xs[0]\nfor i in range(1, len(xs)):\n ys[i] = (1-α)*ys[i-1] + α*xs[i]\nys", "_____no_output_____" ] ], [ [ "Exponentially weighted windows with adjustment (default)", "_____no_output_____" ] ], [ [ "pd.Series(xs).ewm(alpha=0.8, adjust=True).mean()", "_____no_output_____" ] ], [ [ "Re-implementation for insight.", "_____no_output_____" ] ], [ [ "α = 0.8\nys = np.zeros_like(xs)\nys[0] = xs[0]\nfor i in range(1, len(xs)):\n ws = np.array([(1-α)**(i-t) for t in range(i+1)])\n ys[i] = (ws * xs[:len(ws)]).sum()/ws.sum()\nys", 
"_____no_output_____" ], [ "df.plot()\nplt.plot(df.ewm(alpha=0.8).mean(), c='orange')\nplt.plot(df.ewm(alpha=0.2).mean(), c='red')\npass", "_____no_output_____" ] ], [ [ "Alternatives to $\\alpha$", "_____no_output_____" ], [ "Using `span`\n$$\n\\alpha = \\frac{2}{\\text{span} + 1}\n$$\n\nUsing `halflife`\n$$\n\\alpha = 1 - e^\\frac{-\\log{2}}{t_{1/2}}\n$$\n\nUsing `com`\n$$\n\\alpha = \\frac{1}{1 + \\text{com}}\n$$\n", "_____no_output_____" ] ], [ [ "df.plot()\nplt.plot(df.ewm(span=10).mean(), c='orange')\nplt.plot(1+ df.ewm(alpha=2/11).mean(), c='red') # offfset for visibility\npass", "_____no_output_____" ] ], [ [ "## Correlation between time series\n\nSuppose we had a reference time series. It is often of interest to know how any particular time series is correlated with the reference. Often the reference might be a population average, and we want to see where a particular time series deviates in behavior.", "_____no_output_____" ] ], [ [ "! python3 -m pip install --quiet pandas_datareader", "_____no_output_____" ], [ "import pandas_datareader.data as web", "_____no_output_____" ] ], [ [ "We will look at the correlation of some stocks.\n\n```\nQQQ tracks Nasdaq\nMSFT is Microsoft\nGOOG is Gogole\nBP is British Petroleum\n```\n\nWe expect that the technology stocks should be correlated with Nasdaq, but maybe not BP.", "_____no_output_____" ] ], [ [ "df = web.DataReader(['QQQ', 'MSFT','GOOG', 'BP'], 'stooq')\n# api_key=os.environ['IEX_SECRET_KEY'])", "_____no_output_____" ], [ "df = df[['Close']].reset_index()", "_____no_output_____" ], [ "df", "_____no_output_____" ], [ "df = df.set_index(( 'Date', ''))", "_____no_output_____" ], [ "df.head()", "_____no_output_____" ], [ "df.columns", "_____no_output_____" ], [ "df.rolling(100).corr(df[('Close', 'QQQ')]).plot()\npass", "_____no_output_____" ] ], [ [ "## Visualizing space and time data\n\nBeing able to visualize events in space and time can be impressive. 
With Python, often you need a trivial amount of code to produce an impressive visualization.\n\nFor example, lets generate a heatmap of crimes in Sacramento in 2006, and highlight the crimes committed 10 seconds before midnight.\n\nSee the [gmaps](https://github.com/pbugnion/gmaps) package for more information.", "_____no_output_____" ] ], [ [ "sacramento_crime = pd.read_csv('data/SacramentocrimeJanuary2006.csv', index_col=0)", "_____no_output_____" ], [ "sacramento_crime.index = pd.to_datetime(sacramento_crime.index)", "_____no_output_____" ], [ "sacramento_crime.head()", "_____no_output_____" ], [ "gmaps.configure(api_key=os.environ[\"GOOGLE_API_KEY\"])", "_____no_output_____" ], [ "locations = sacramento_crime[['latitude', 'longitude']]", "_____no_output_____" ], [ "late_locations = sacramento_crime.between_time('23:59', '23:59:59')[['latitude', 'longitude']]", "_____no_output_____" ], [ "fig = gmaps.figure()\nfig.add_layer(gmaps.heatmap_layer(locations))\nmarkers = gmaps.marker_layer(late_locations)\nfig.add_layer(markers)\nfig", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
4aa3f3b85c10dab313f9f1f150edbb18fe00ed6a
368,468
ipynb
Jupyter Notebook
Week06/Homework03.ipynb
ds-connectors/Physics-88-Sp21
a30c744d261b9cec8ae8b3e787c962c51742ff2f
[ "BSD-3-Clause" ]
null
null
null
Week06/Homework03.ipynb
ds-connectors/Physics-88-Sp21
a30c744d261b9cec8ae8b3e787c962c51742ff2f
[ "BSD-3-Clause" ]
null
null
null
Week06/Homework03.ipynb
ds-connectors/Physics-88-Sp21
a30c744d261b9cec8ae8b3e787c962c51742ff2f
[ "BSD-3-Clause" ]
null
null
null
1,283.860627
223,760
0.954251
[ [ [ "Your name here. \nYour Woskshop section here.", "_____no_output_____" ], [ "# Homework 3: Arrays, File I/O and Plotting", "_____no_output_____" ], [ "**Submit this notebook to bCourses to receive a grade for this Workshop.**\n\nPlease complete homework activities in code cells in this iPython notebook. Be sure to comment your code well so that anyone who reads it can follow it and use it. Enter your name in the cell at the top of the notebook. When you are ready to submit it, you should download it as a python notebook (click \"File\", \"Download as\", \"Notebook (.ipynb)\") and upload it on bCourses under the Assignments tab. Please also save the notebook as PDF and upload to bCourses. ", "_____no_output_____" ], [ "## Problem 1: Sunspots\n\n[Adapted from Newman, Exercise 3.1] At <a href=\"http://www-personal.umich.edu/~mejn/computational-physics/sunspots.txt\">this link</a> (and also in your current directory on datahub) you will find a file called `sunspots.txt`, which contains the observed number of sunspots on the Sun for each month since January 1749. The file contains two columns of numbers, the first being the month and the second being the sunspot number.\n\na. Write a program that reads in the data and makes a graph of sunspots as a function of time. Adjust the $x$ axis so that the data fills the whole horizontal width of the graph.", "_____no_output_____" ], [ "b. Modify your code to display two subplots in a single figure: The plot from Part 1 with all the data, and a second subplot with the first 1000 data points on the graph.", "_____no_output_____" ], [ "c. Write a function `running_average(y, r)` that takes an array or list $y$ and calculates the running average of the data, defined by \n$$ Y_k = \\frac{1}{2r+1} \\sum_{m=-r}^r y_{k+m},$$\nwhere $y_k$ are the sunspot numbers in our case. 
Use this function and modify your second subplot (the one with the first 1000 data points) to plot both the original data and the running average on the same graph, again over the range covered by the first 1000 data points. Use $r=5$, but make sure your program allows the user to easily change $r$.", "_____no_output_____" ], [ "The next two parts may require you to google for how to do things. Make a strong effort to do these parts on your own without asking for help. If you do ask for help from a GSI or friend, first ask them to point you to the resource they used, and do your best to learn the necessary techniques from that resource yourself. Finding and learning from online documentation and forums is a very important skill. (Hint: Stack Exchange/Stack Overflow is often a great resource.)\n\nd. Add legends to each of your subplots, but make them partially transparent, so that you can still see any data that they might overlap. *Note: In your program, you should only have to change $r$ for the running average in one place to adjust both the graph and the legend.*", "_____no_output_____" ], [ "e. Since the $x$ and $y$ axes in both subplots have the same units, add shared $x$ and $y$ labels to your plot that are centered on the horizontal and vertical dimensions of your figure, respectively. Also add a single title to your figure.\n\nWhen your are finished, your plot should look something close to this:", "_____no_output_____" ] ], [ [ "# Don't rerun this snippet of code.\n# If you accidentally do, uncomment the lines below and rerun\n\n#from IPython.display import Image\n#Image(filename=\"img/p1_output.png\")", "_____no_output_____" ] ], [ [ "#### Hints\n\n* The running average is not defined for the first and last few points that you're taking a running average over. (Why is that?) Notice, for instance, that the black curve in the plot above doesn't extend quite as far on either side as the red curve. 
For making your plot, it might be helpful if your `running_average` function returns an array of the $x$-values $x_k$ (or their corresponding indices $k$) along with an array of the $y$-values $Y_k$ that you compute for the running average.\n\n* You can use the Latex code `$\\pm$` for the $\\pm$ symbol in the legend. You can also just write `+/-` if you prefer.\n", "_____no_output_____" ], [ "## Problem 2: Variety Plot\n\nIn this problem, you will reproduce the following as a single figure with four subplots, as best you can:", "_____no_output_____" ] ], [ [ "# Don't rerun this snippet of code.\n# If you accidentally do, uncomment the lines below and rerun\n\n#from IPython.display import Image\n#Image(filename=\"img/p2_output.png\")", "_____no_output_____" ] ], [ [ "Here are some hints and directions for each one:\n\n**Upper-left:** This is an image of silicon taken with an electron microscope.\n\nYou can find the data file `stm.txt` [here](http://www-personal.umich.edu/~mejn/computational-physics/stm.txt) and in your datahub directory, among resources for the [Newman](http://www-personal.umich.edu/~mejn/computational-physics/) text.\n\nYou may assume that the upper-left of the array is indeed the upper-left of the image.\n\nBoth axes should run from 0 to 5.5.\n\nThis subplot uses the `gray` colormap.", "_____no_output_____" ], [ "**Upper-Right:** Matplotlib can plot any list of $(x,y)$ points you give it, including parametric or polar curves. 
The curve in this subplot is called a \"deltoid\", and is the result of the equations\n\n$$ \\begin{align*}\nx &= 2\\cos\\theta + \\cos2\\theta \\\\\ny &= 2\\sin\\theta - \\sin2\\theta\n\\end{align*} $$\n\nover a range of $\\theta$ from $0$ to $2\\pi$.\n\nTo get the aspect ratio equal with nice spacing around the curve, try one of the following, depending on how you are making your subplots:\n- if you're using `plt.subplot(...)` to get each subplot (the \"state-machine\" approach), add the `aspect='equal'` and `adjustable='datalim'` arguments to the deltoid subplot, so your command will look something like `plt.subplot(..., aspect='equal', adjustable='datalim')`.\n- if you're using `... = plt.subplots(...)` (note the 's'!) or `ax = fig.add_subplot(...)` on a figure `fig` to get subplots with axes objects (the \"object-oriented\" approach), add the line `ax.set_aspect(aspect='equal', adjustable='datalim')`, where `ax` is the axes object you want to affect.", "_____no_output_____" ], [ "**Lower-Left:** This kind of plot is called a log-log plot, where both axes are on a logarithmic scale. Google or look in the matplotlib gallery to learn how to make this kind of plot.\n\nThe three curves are $y = x$, $y = x^2$, and $y = x^3$, where $x$ ranges over $10^{-1}$ to $10^1$. (Note: You can write powers of ten in python using the shorthand `1e-1` for $10^{-1}$, `1e1` for $10^1$, and so on.)\n\nTo make the pretty mathematical labels you see in the sample figure above, you can use\n* `r'$y = x, x^2, x^3$'` for the title\n* `r'$x$'` for the $x$-axis, and\n* `r'$y$'` for the $y$-axis.\n\nJust put these bits of code as you see them (with the **`r`** outside the quotes!) 
where you would normally put a string for the title or axes labels.", "_____no_output_____" ], [ "**Lower-Right:** Here you see a density plot with contours of the function\n\n$$f(x,y) = \\cos^2(\\pi\\,x\\,y ) e^{-\\frac{x^2 + 4 y}{8}},$$\n\nover $x$ from -2 to 2 and $y$ from -3 to 0.2.\n\nUse `meshgrid` to generate the $x$ and $y$ values. Be careful to make sure that the point $(-2,-3)$ is in the bottom left corner of the plot.\n\nYou'll need to use both `imshow` and `contour` to generate the density plot and then overlay it with contours. This plot uses the default contour spacing, so you don't need to worry about adjusting that. The colormap is `jet`, matplotlib's current default. (The default colormap will be changing to `viridis` in the next version.)\n\nTo get the ticks spaced out like you see here, use matplotlib's `xticks` or `set_xticks` functions for the $x$-axis (depending on how you're making your plots), and similar functions for the $y$-axis. You can pass each of these a single argument: a simple list or array of the numbers you want ticked on each axis.\n\n**Spacing the subplots:** Once all is said and done and you run `plt.show()`, you may notice your plots are cramped and overlapping each other. Add the line `plt.tight_layout()` before `plt.show()`, and matplotlib will space things out in an attempt to avoid overlapping subplots.", "_____no_output_____" ], [ "## Problem 3: Wind Statistics\n\nIn this problem, you'll create a new array that summarizes a larger dataset. In the folder for this Homework assignment, you'll find a dataset \"`wind.data`\", and a description of the dataset \"`wind.desc`\". \n\nRead the description `wind.desc` to understand the format of `wind.data`. Then, write a program which loads the data and generates a new array with the following format: each row should contain the year, month, mean wind speed, min wind speed, and max wind speed, in that order. 
The means, mins, and maxes should be taken over all days in that particular month, over all the cities that `wind.data` includes. The rows should be in chronological order. Printing your array should give you something like this:\n\n [[ 61. 1. 11.38064516 0.5 27.71 ]\n [ 61. 2. 13.49235119 2.21 29.63 ]\n [ 61. 3. 11.07236559 1.46 23.45 ]\n ..., \n [ 78. 10. 9.23389785 0.21 28.21 ]\n [ 78. 11. 12.72102778 0.96 30.21 ]\n [ 78. 12. 12.70357527 0.42 41.46 ]]\n\nNext, plot the means, mins, and maxes over time using just a single `plt.plot()` command, along with whatever other commands you need to label and display the figure. (Hint: What happens when you plot a 2D array?) Be sure to label your figure with proper units, etc. The x-axis may be labeled using year/month or just the years (i.e. have ticks along the axis which are labeled with some of the years, like 1961,1964,1967,1970,etc.). There are several ways to accomplish this. One way is to use `plt.xticks()`, which will give you the most explicit control over which points on the x axis get labeled and what labels they get. To understand this function, check out the documentation for `matplotlib.pyplot.xticks` on the `matplotlib` website.\n\n** Just for fun **\n\nIf you look at your plot, you'll notice some periodicity in the wind speeds. To better see the period, use the `running_average` function you wrote for the Sunspots problem to plot the running average of the means, mins, and maxes along with the original curves. Depending on how you wrote your `running_average` function, it may work immediately on this new 2D array without any modifications! From visual inspection of your running average, how long is the period, and when are wind speeds fastest and slowest in Ireland?\n\n#### Hints\n\nDepending on how you do this problem, you may find it useful to use boolean indexing along with numpy's `logical_and` function. It's okay to use lists as intermediate steps in generating your array. 
That is, not everything has to be an array, though it is possible to do this with only arrays. (Challenge!)\n\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
4aa4125e889aa665351dc0a6929262a7cf66a35a
169,618
ipynb
Jupyter Notebook
5_DataWranglingWithPandas_CheatSheet.ipynb
maximcondon/Project_BabyNames
73176b6a2473830275db53dcd0d05e0c6f751978
[ "MIT" ]
null
null
null
5_DataWranglingWithPandas_CheatSheet.ipynb
maximcondon/Project_BabyNames
73176b6a2473830275db53dcd0d05e0c6f751978
[ "MIT" ]
null
null
null
5_DataWranglingWithPandas_CheatSheet.ipynb
maximcondon/Project_BabyNames
73176b6a2473830275db53dcd0d05e0c6f751978
[ "MIT" ]
null
null
null
37.352566
18,352
0.456325
[ [ [ "# Pandas Cheat Sheet\n\n## Inspect\n\n**df.info( )** - tells you the type of object you have eg object, int, float AND the amount of memory your DataFrame is using up!\n\n**df.describe( )** - gives you a series of information about your DataFrame - mean, stdev, count, max, min values...\n\n**df.shape** - gives you a tuple of the dimensions of your data\n\n| command \t| description |\n|:-----------------:|:------------------:|\n| df.head( )\t| shows n first rows |\n| df.tail( )\t| shows n last rows |\n| df.info( )\t| lists columns and their types |\n| df.describe( )\t| calculates descriptive statistics |\n| df['Series'].value_counts( ) | counts distinct values in a column |\n| df.count( ) | calculates number of non-empty rows |\n| df.sum( ) |\tcalculates sum of each column |\n| df.mean( )\t| calculates mean of each column |\n| df.shape |tuple with the number of rows and columns|\n| df.values\t| contents as a NumPy array |\n\n## Selecting Rows and Columns\n\n| Operation |\t Description |\n|:----------------:|:------------------:|\n| df[col] |select one column as a Series|\n| df[[col1, col2]] |\tselect 2+ columns as a DataFrame|\n| df.loc[row] | select one row as a Series |\n| df.loc[row, col] |\tselect 2+ rows as a DataFrame|\n| df.iloc[a:b,c:d] |\tselect rows/columns by index |\n| df[df[col] > x] |\t select rows by condition |\n|df[df[col].between(x, y) |\t select rows by range |\n|df[df[col].str.startswith('A')] | apply string function|\n|df[col].str.lower( ) | apply string function|\n|df[col] = x | assign to existing or new column|\n| del df[col]| delete column |\n| df.columns | list with column names |\n| df.index | list with row index |\n| df.iterrows( ) | iterate over rows |\n\n## Plots\n\n| command | description |\n|:--------------:|:------------------:|\n| df.plot( )\t | line plot of each column|\n| df.plot.bar( ) |one bar for each column |\n|df.plot.scatter( ) |\tcreates a single scatter plot |\n| df.hist( )\t | draws a histogram for each column 
|\n|df.boxplot( )\t | draws a boxplot for each column |\n|df.scatter_matrix( )|\tdraws scatterplot matrix |\n\n## Manipulating DataFrames\n\n| command |\t description |\n|:----------------:|:----------------:|\n| df.transpose( ) |\tswaps rows and columns |\n| df.stack( ) | moves columns to hierarchical row index |\n| df.unstack( ) | creates columns from hierarchical row index|\n| df.pivot_table | Create a spreadsheet-style pivot table as a DF |\n|df.groupby([ x, y]) [ z ] | Groups DF or Series using a mapper or by a Series of columns|\n| df.fillna( ) | Fills Na values with value indicated |\n| df.isna( ) | Detect missing (Na) values |\n| df.dropna( ) | Removes rows from DF with Na values |\n\n\n## Data Wrangling\n\n| command\t | description |\n|:------------------------:|:------------------------:|\n| df.sort_values( ) | sort the rows by one or more columns|\n| pd.concat([df1, df2, ...]) | sticks two DataFrames together |\n| df.join( ) | joins two DataFrames on specific keys/columns|\n| df.merge( ) | join with more options |\n| df['Series'].astype( ) | convert type of a column |\n| df.replace( )\t | replace values by others |\n| df.set_index( ) | \tmoves one column into the index |\n| df.reset_index( )\t | moves the index into a column |\n| df.iterrows( ) | iterate over rows (LAST RESORT)|\n| del df[col]\t | delete column |\n\n\n## Aggregation\n\n| Command | Description |\n|:----------------:|:------------------:|\n| df.groupby( )| Groups DF or Series using a mapper or by a Series of columns |\n| df.transform( )| Call func on self producing a DF with transformed values and that has the same axis length as self |\n| df.apply( ) | apply a function along an axis of the DF |\n| df.mean( ) | gives you the mean of a numerical column |\n| df.max( ) | gives you max value for the column |\n| df.min( ) | gives you min value for the column |\n| df.sum( ) | return the sum of the values for requested axis |\n| df.cumsum( ) | gives you the cumulative sum for column |\n| 
df.count( ) | counts non-NA cells for each column or row |\n| df.std( ) | returns standard deviation over requested axis |\n| df.median( ) | returns median over requested axis |\n| df.quantile(0.9) | Return values at the given quantile over axis |\n| df.describe( ) | gives you a series of information about your DF - mean, stdev, count, max, min values... |\n| df.corr( ) | Compute pairwise correlation of columns, excluding null values |\n\n\n## Reading and Writing DataFrames\n\n- **pd.read_csv( )** - reads your file and copies data to your notebook\n- **pd.read_excel( )** - reads your excel file \n- **df.to_csv( )** - saves your DataFrame to a csv file\n- **df.to_excel( )** - saves your DF to an xlsx file\n- **df.to_json( )** - saves your DF to a json file\n\n**Reading and using the first column as index**\n\n df = pd.read_csv(\"large_countries_2015.csv\", index_col=0)\n\nThe following parameters may be useful:\n\n- `sep` - column separator when reading CSV\n- `header` - whether there is a row with the header (boolean)\n- `names` - column names when there is no header\n- `index_col` - which column to use as index\n\n## Useful Pandas links\n\nhttps://www.google.de/search?q=pandas+cookbook&rlz=1C5CHFA_enGB842GB842&oq=pandas+cook&aqs=chrome.0.0j69i57j0l4.2929j0j7&sourceid=chrome&ie=UTF-8\n\nhttps://pandas.pydata.org/pandas-docs/stable/user_guide/cookbook.html\n\nhttps://pandas.pydata.org/pandas-docs/stable/index.html\n\nhttps://www.dropbox.com/sh/mxzo38txgdja9g7/AAAQTIlurE4OsQ1xd9EwW43aa?dl=0\n\nhttps://pandas.pydata.org/Pandas_Cheat_Sheet.pdf\n\nhttps://s3.amazonaws.com/assets.datacamp.com/blog_assets/PandasPythonForDataScience.pdf\n\nfile:///Users/maximcondon/Documents/datascience/_downloads/pandas-cheat-sheet.pdf", "_____no_output_____" ], [ "## Exercise 1\n\n### 1. 
Read a .csv file (or.txt) with .read_csv", "_____no_output_____" ] ], [ [ "import pandas as pd\n\ndf = pd.read_csv ('yob2017.txt', names = ['name', 'gender', 'count'], header = None)\ndf.head()", "_____no_output_____" ] ], [ [ "### 2. Save it to an excel file", "_____no_output_____" ] ], [ [ "df.to_excel('names.xlsx')", "_____no_output_____" ] ], [ [ "### Read the spreadsheet again ", "_____no_output_____" ] ], [ [ "pd.read_excel('names.xlsx').head(10)", "_____no_output_____" ] ], [ [ "### 3. Select all rows with boys ", "_____no_output_____" ] ], [ [ "df[df['gender'] == 'M'].head(10)", "_____no_output_____" ], [ "boys = df[df['gender'] == 'M']", "_____no_output_____" ] ], [ [ "### 4. Select the top 10 names that occur more than 10,000 times", "_____no_output_____" ] ], [ [ "boys_10 = boys[boys['count'] > 10000].head(10)", "_____no_output_____" ], [ "boys_10", "_____no_output_____" ] ], [ [ "### 5. Select names that occur between 100 and 200 times", "_____no_output_____" ] ], [ [ "df[(df['count'] > 100) & (df['count'] < 200)].head(20)", "_____no_output_____" ] ], [ [ "#### Can also do this snazzy bit of code - df.between(..., ...):", "_____no_output_____" ] ], [ [ "#but it includes 200!!!\ndf[df['count'].between(100,200)].head(10)", "_____no_output_____" ] ], [ [ "### 6. Count the boys and girls", "_____no_output_____" ] ], [ [ "df.groupby('gender').sum()", "_____no_output_____" ] ], [ [ "### 7. 
Extract first and last character as separate columns, using df.apply( )!", "_____no_output_____" ] ], [ [ "def first(string):\n return string[0]\n \ndf['first'] = df['name'].apply(first)\n\ndf.head(10)", "_____no_output_____" ], [ "def last(string):\n return string[-1]\n \ndf['last'] = df['name'].apply(last)\n\ndf.head(10)", "_____no_output_____" ] ], [ [ "#### For this particular operation there is a nice shortcut on pandas, df.str :", "_____no_output_____" ] ], [ [ "df['name'].str[0].head(10)", "_____no_output_____" ] ], [ [ "#### For the last letter just do -1 index position!!", "_____no_output_____" ] ], [ [ "df['name'].str[-1].head(10)", "_____no_output_____" ] ], [ [ "### 8. Finding the length of a string inside a DataFrame\n\n##### Can use a function just like above\n\n##### Or a super-handy function .str.len( ) !!!", "_____no_output_____" ] ], [ [ "df['name'].str.len().head(10)", "_____no_output_____" ], [ "def length(string):\n return len(string)\n \ndf['Length of name'] = df['name'].apply(length)\n\ndf.head(10)", "_____no_output_____" ] ], [ [ "### 9. Count frequency combinations of first/last characters", "_____no_output_____" ] ], [ [ "count = df.groupby(['first','last'])['count'].count()\ncount", "_____no_output_____" ], [ "count.sort_values(ascending=False).head(10)", "_____no_output_____" ] ], [ [ "### This gives us a 26 x 26 DataFrame of number of names with their first character and last character being the letters in the rows/columns:", "_____no_output_____" ] ], [ [ "cross = count.unstack()\ncross = cross.fillna(0.0)\ncross", "_____no_output_____" ] ], [ [ "### 10. 
Use Seaborn to visualise this matrix!", "_____no_output_____" ] ], [ [ "%matplotlib inline\n\nfrom matplotlib import pyplot as plt\nimport seaborn as sns\n\nplt.figure(figsize=(10,8))\nsns.heatmap(cross)", "_____no_output_____" ] ], [ [ "**We used cross.fillna(0) to fill the NaN spots so we don't have white space!**\n\n### Change the colour of your heatmap using cmap!\n\nsns.heatmap(cross, cmap='RRR') - cmap changes the colour scheme", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(10,8))\nsns.heatmap(cross, cmap='viridis')", "_____no_output_____" ] ], [ [ "## Exercise 2\n\n df = pd.read_csv('yob2000.txt', names=['name', 'gender', 'count'])\n\n### 1. Create two separate DataFrames for boys and girls", "_____no_output_____" ] ], [ [ "df = pd.read_csv('yob2000.txt', names=['name', 'gender', 'count'])", "_____no_output_____" ], [ "boys = df[df['gender'] == 'M']", "_____no_output_____" ], [ "boys.head()", "_____no_output_____" ], [ "girls = df[df['gender'] == 'F']", "_____no_output_____" ], [ "girls.head()", "_____no_output_____" ] ], [ [ "### 2. Put the two DataFrames together again", "_____no_output_____" ] ], [ [ "dfs = [girls, boys]\n\ndf = pd.concat(dfs)\n\ndf.iloc[17648:17658] \n# Can see from this that we've managed to put the dataframes\n# back together again!", "_____no_output_____" ] ], [ [ "### 3. Connect boys and girls horizontally only where the names match", "_____no_output_____" ] ], [ [ "merged = girls.merge(boys, on='name', suffixes=('_female', '_male'))", "_____no_output_____" ], [ "merged", "_____no_output_____" ] ], [ [ "### 4. 
Create a gender column with the values ‘male’ and ‘female’", "_____no_output_____" ] ], [ [ "#merged['gender'] = None\n\n#merged", "_____no_output_____" ], [ "# def sex(string):\n \n# if merged['gender_female'] == 'F':\n# merged['gender'] = 'female'\n# else:\n# merged['gender'] = 'male'\n \n# merged['gender'] = merged['gender_female'].apply(sex)\n\n# merged", "_____no_output_____" ], [ "# for x in merged:\n \n\n# if merged['gender_female'] == 'F':\n# merged['gender'].apply\n\n# df['gender'] = ", "_____no_output_____" ] ], [ [ "### 5. Use the name as an index", "_____no_output_____" ], [ "## Exercise 3\n\nCombine two DataFrames\n\n### 1. load the data in the file data/gapminder_lifeexpectancy.xlsx\n\n### 2. select a year of your choice", "_____no_output_____" ] ], [ [ "df = pd.read_excel('gapminder_lifeexpectancy.xlsx', index_col=0)", "_____no_output_____" ], [ "df = df[[2000]]\n\ndf.head(5)", "_____no_output_____" ], [ "df = df.reset_index()", "_____no_output_____" ], [ "df.head()", "_____no_output_____" ] ], [ [ "### 3. create an separate DataFrame that has two columns: country and continent, and manually fill it with data for 10-20 countries\n\n### 4. merge both DataFrames\n\n### 5. remove all rows with empty values", "_____no_output_____" ] ], [ [ "d = {'Life expectancy': ['United Kingdom', 'France', 'Germany', 'Italy', 'Spain', \n 'Brazil', 'United States', 'Argentina', 'Colombia', 'Peru',\n 'Russia', 'Myanmar', 'China', 'India', 'Vietnam',\n 'Senegal', 'Nigeria', 'Niger', 'South Africa', 'Lesotho'],\n 'continent': ['Europe', 'Europe', 'Europe', 'Europe', 'Europe',\n 'America', 'America', 'America', 'America', 'America',\n 'Asia', 'Asia', 'Asia', 'Asia', 'Asia',\n 'Africa', 'Africa', 'Africa', 'Africa', 'Africa']}\n\n\ndf2 = pd.DataFrame(data=d)\ndf2", "_____no_output_____" ], [ "merge = df.merge(df2, on='Life expectancy')\nmerge", "_____no_output_____" ] ], [ [ "### 6. 
print the highest and lowest fertility for each continent", "_____no_output_____" ] ], [ [ "continents = list(merge['continent'].unique())\ncontinents", "_____no_output_____" ], [ "america = merge[merge['continent'] == 'America']\neurope = merge[merge['continent'] == 'Europe']\nafrica = merge[merge['continent'] == 'Africa']\nasia = merge[merge['continent'] == 'Asia']", "_____no_output_____" ], [ "print(f'Country in {continents[0]} with the highest fertility is {america.max()[0]}')\nprint(f'Country in {continents[0]} with the lowest fertility is {america.min()[0]}\\n')\n\nprint(f'Country in {continents[1]} with the highest fertility is {europe.max()[0]}')\nprint(f'Country in {continents[1]} with the lowest fertility is {europe.min()[0]}\\n')\n\nprint(f'Country in {continents[2]} with the highest fertility is {africa.max()[0]}')\nprint(f'Country in {continents[2]} with the lowest fertility is {africa.min()[0]}\\n')\n\nprint(f'Country in {continents[3]} with the highest fertility is {asia.max()[0]}')\nprint(f'Country in {continents[3]} with the lowest fertility is {asia.min()[0]}\\n')", "Country in America with the highest fertility is United States\nCountry in America with the lowest fertility is Argentina\n\nCountry in Asia with the highest fertility is United Kingdom\nCountry in Asia with the lowest fertility is France\n\nCountry in Europe with the highest fertility is South Africa\nCountry in Europe with the lowest fertility is Lesotho\n\nCountry in Africa with the highest fertility is Vietnam\nCountry in Africa with the lowest fertility is China\n\n" ], [ "merge[merge['continent'] == 'America']", "_____no_output_____" ], [ "merge[merge['continent'] == 'America'].max()[0]", "_____no_output_____" ], [ "continents[0]", "_____no_output_____" ], [ "merge['continent'].unique()", "_____no_output_____" ], [ "def high_low(df):\n\n continents = list(df['continent'].unique())\n \n america = df[df['continent'] == 'America']\n europe = df[df['continent'] == 'Europe']\n africa 
= df[df['continent'] == 'Africa']\n asia = df[df['continent'] == 'Asia']\n \n# for i in continents:\n \n# print(i)\n return f'Country in {continents[0]} with the highest fertility is {america.max()[0]}'\n \n# america = \n# merge[merge['continent'] == 'America'].max()[0]\n\n# print(df[df['continent'] == 'America'].max())\n\n# print(f'Country in {continents[0]} with the highest fertility is {america.max()[0]}')\n# print(f'Country in {continents[0]} with the lowest fertility is {america.min()[0]}\\n')\n\n# print(f'Country in {continents[1]} with the highest fertility is {europe.max()[0]}')\n# print(f'Country in {continents[1]} with the lowest fertility is {europe.min()[0]}\\n')\n\n# print(f'Country in {continents[2]} with the highest fertility is {africa.max()[0]}')\n# print(f'Country in {continents[2]} with the lowest fertility is {africa.min()[0]}\\n')\n\n# print(f'Country in {continents[3]} with the highest fertility is {asia.max()[0]}')\n# print(f'Country in {continents[3]} with the lowest fertility is {asia.min()[0]}\\n')\n\nhigh_low(merge)", "_____no_output_____" ], [ "def high_low(df):\n \n continents = list(df['continent'].unique())\n \n# return continents\n \n for i in continents:\n \n return df[df[i] == i[0].max()[0]]\n \n# i + df[df[i] == i[0].max()[0]] + df[df[i] == i[0].min()[0]]\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa413284d3186149c7353d8cb66ae5d0a02fdfa
815,979
ipynb
Jupyter Notebook
notebooks/bert_large_uncased_mitmovies.ipynb
kemalaraz/MovieEntityRecognizer
2ec85b9ef536d7d0e29709dfd78b0a922bac59cf
[ "MIT" ]
2
2021-03-16T17:04:56.000Z
2021-06-11T12:42:44.000Z
notebooks/bert_large_uncased_mitmovies.ipynb
kemalaraz/NamedEntityRecognizer
2ec85b9ef536d7d0e29709dfd78b0a922bac59cf
[ "MIT" ]
null
null
null
notebooks/bert_large_uncased_mitmovies.ipynb
kemalaraz/NamedEntityRecognizer
2ec85b9ef536d7d0e29709dfd78b0a922bac59cf
[ "MIT" ]
null
null
null
1,402.025773
162,160
0.905025
[ [ [ "## BERT model for MITMovies Dataset\nI was going to make this repository a package with setup.py and everything but because of my deadlines and responsibilities at my current workplace I haven't got the time to do that so I shared the structure of the project in README.md file.", "_____no_output_____" ] ], [ [ "# If any issues open the one that gives error\n# !pip install transformers\n# !pip install torch==1.5.1\n# !pip install tqdm\n# !pip install tensorboard\n# !pip install seqeval\n# ! pip install tqdm\n# ! pip install seaborn\n# !pip install gensim", "_____no_output_____" ], [ "import os\nimport sys\nimport json\nimport numpy as np\nfrom tqdm import tqdm\nsys.path.append(\"..\")\n\nimport torch\nfrom torch import nn\nimport torch.nn.functional as F\nfrom torch.utils import tensorboard\nfrom seqeval.metrics import classification_report\nfrom transformers import Trainer, TrainingArguments\nfrom torch.utils.data import (DataLoader, RandomSampler, SequentialSampler,\n TensorDataset)\nfrom transformers import (WEIGHTS_NAME, AdamW, BertConfig,\n BertForTokenClassification, BertTokenizerFast,\n get_linear_schedule_with_warmup)\nfrom src.namedentityrecognizer.models.bertner import BertNerModel\nfrom src.namedentityrecognizer.data.analyze_dataset import Analyzer\nfrom src.namedentityrecognizer.data.build_dataset import BuildData\nfrom src.namedentityrecognizer.data.make_dataset import MakeData\nfrom src.namedentityrecognizer.utils.processors import NerPreProcessor, NerDataset\nfrom sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score", "_____no_output_____" ], [ "# Some initializers\ntrain = True\nnum_train_epochs = 5\ntrain_batch_size = 32\neval_batch_size = 8\n# Weight decay for regularization\nweight_decay = 0.01\n# Now 1 but if batches wont fit RAM can be increased\ngradient_accumulation_steps = 1\n# %10 warm up\nwarmup_proportion = 0.1\n# Adam variables\nadam_epsilon = 1e-8\nlearning_rate = 5e-5\n# 16 floating point instead of 
32\nfp16 = False\nif fp16:\n # Apex AMP optimization level selected in ['O0', 'O1', 'O2', and 'O3']\n fp16_opt_level\n# max seq length (for engtrain.bio since the lengths are pretty short 128 is alright)\nmax_seq_length = 128\n# For gradient clipping\nmax_grad_norm = 1.0\n# For having flexibility over hardware\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n# Data path\ndata_dir = \"/content/drive/MyDrive/MovieEntityRecognizer/data/modified\"\n# Tensorboard Name\ntensorboard_writer = tensorboard.SummaryWriter(\"bert_base_uncased_default\")\n\nvalidate = True\ntest = True", "_____no_output_____" ], [ "# For downloading data, doesn't require ssl so if downloaded no need to run it again\n# dataset_names = [\"engtrain.bio\", \"engtest.bio\", \"trivia10k13train.n,bio\", \"trivia10k13test.bio\"]\n# (MakeData.download_data(os.path.join(\"http://groups.csail.mit.edu/sls/downloads/movie\", dataset_name) for dataset_name in dataset_names)\n\n", "_____no_output_____" ], [ "# Count also word frequencies and lengths or sentences\ntrain_labels = Analyzer.count_labels(\"/home/karaz/Desktop/MovieEntityRecognizer/data/raw/mitmovies/engtrain.bio\", without_o=True)\nAnalyzer.plot_data(train_labels)\ntest_labels = Analyzer.count_labels(\"/home/karaz/Desktop/MovieEntityRecognizer/data/raw/mitmovies/engtest.bio\", without_o=True)\nAnalyzer.plot_data(test_labels)", "_____no_output_____" ], [ "# Get distinct labels\nlabel_list = sorted(list(train_labels.keys()))\nlabel_list.append(\"O\")\nnum_labels = len(label_list)\nlabel_map = {label: id for id, label in enumerate(label_list)}\nprint(f\"Size of labels of regular dataset: {len(label_list)}\\n{label_map}\")", "Size of labels of regular dataset: 25\n{'B-ACTOR': 0, 'B-CHARACTER': 1, 'B-DIRECTOR': 2, 'B-GENRE': 3, 'B-PLOT': 4, 'B-RATING': 5, 'B-RATINGS_AVERAGE': 6, 'B-REVIEW': 7, 'B-SONG': 8, 'B-TITLE': 9, 'B-TRAILER': 10, 'B-YEAR': 11, 'I-ACTOR': 12, 'I-CHARACTER': 13, 'I-DIRECTOR': 14, 'I-GENRE': 15, 'I-PLOT': 
16, 'I-RATING': 17, 'I-RATINGS_AVERAGE': 18, 'I-REVIEW': 19, 'I-SONG': 20, 'I-TITLE': 21, 'I-TRAILER': 22, 'I-YEAR': 23, 'O': 24}\n" ], [ "# model configurations and tokenizer\nconfig = BertConfig.from_pretrained(\"bert-large-uncased\", num_labels=num_labels, finetuning_task=\"ner\")\ntokenizer = BertTokenizerFast.from_pretrained(\"bert-large-uncased\")", "_____no_output_____" ], [ "# Change home karaz desktop path to your home directory (basically where the repository is)\ndataset = BuildData.create_dataset(\"/home/karaz/Desktop/MovieEntityRecognizer/data/modified/mitmovies\")\nid2label = {id: label for (label,id) in label_map.items()}\nid2label[-100] = 'X'\nid2label", "_____no_output_____" ], [ "if train:\n num_train_optimization_steps = int(\n len(dataset['train_instances']) / train_batch_size / gradient_accumulation_steps) * num_train_epochs\n print(f\"Number of training steps {num_train_optimization_steps}\")\n print(f\"Number of training instances {len(dataset['train_instances'])}\")\n\nif test:\n test_steps = int(\n len(dataset['test_instances']) / eval_batch_size)\n print(f\"Number of test steps {test_steps}\")\n print(f\"Number of test instances {len(dataset['test_instances'])}\")", "Number of training steps 1525\nNumber of training instances 9775\nNumber of test steps 305\nNumber of test instances 2443\n" ], [ "# Tokenize the datasets\ntrain_tokens = tokenizer(dataset[\"train_instances\"], is_split_into_words=True, return_offsets_mapping=True,\n padding=True, truncation=True)\n\ntest_tokens = tokenizer(dataset['test_instances'], is_split_into_words=True, return_offsets_mapping=True, \n padding=True, truncation=True)", "_____no_output_____" ], [ "# Encode labels and give -100 to tokens which you dont want to backpropagate (basically mask them out)\ntrain_labels = NerPreProcessor.convert_labels(dataset[\"train_labels\"], \n label_map, \n train_tokens)\n\ntest_labels = NerPreProcessor.convert_labels(dataset['test_labels'],\n label_map, \n test_tokens)", 
"_____no_output_____" ], [ "# Get rid of unnecessary data and create final data\nif train_tokens[\"offset_mapping\"]:\n train_tokens.pop(\"offset_mapping\") \nif test_tokens[\"offset_mapping\"]:\n test_tokens.pop(\"offset_mapping\")\n\ntrain_dataset = NerDataset(train_tokens, train_labels)\ntest_dataset = NerDataset(test_tokens, test_labels)", "_____no_output_____" ], [ "# Model initialization for high level api of huggingface\ndef model_init():\n model = BertForTokenClassification.from_pretrained('bert-large-uncased', num_labels=len(label_map))\n return model", "_____no_output_____" ], [ "# I left the compute metrics here in order to show how the evaluation\ndef compute_metrics(p):\n\n predictions, labels = p\n predictions = np.argmax(predictions, axis=2)\n\n # Remove ignored index (special tokens)\n true_predictions = [\n [id2label[p] for (p, l) in zip(prediction, label) if l != -100]\n for prediction, label in zip(predictions, labels)\n ]\n true_labels = [\n [id2label[l] for (p, l) in zip(prediction, label) if l != -100]\n for prediction, label in zip(predictions, labels)\n ]\n\n preds_stretched = [label for doc in true_predictions for label in doc]\n trues_stretched = [label for doc in true_labels for label in doc]\n\n return {\n \"accuracy_score\": accuracy_score(trues_stretched, preds_stretched),\n \"precision\": precision_score(trues_stretched, preds_stretched, labels=np.unique(preds_stretched), average='macro'),\n \"recall\": recall_score(trues_stretched, preds_stretched, labels=np.unique(preds_stretched), average='macro'),\n \"f1_macro\": f1_score(trues_stretched, preds_stretched, labels=np.unique(preds_stretched), average='macro'),\n \"f1_micro\": f1_score(trues_stretched, preds_stretched, average='micro'),\n }", "_____no_output_____" ], [ "model_name = \"bert-large-uncased-micro-10epoch\"\ntraining_args = TrainingArguments(\n output_dir = \"/home/kemalaraz/Desktop/MovieEntityRecognizer/pretrained_models/\" + model_name, # output directory\n 
overwrite_output_dir = True,\n evaluation_strategy='epoch',\n num_train_epochs = 10, # total number of training epochs\n per_device_train_batch_size=16, # batch size per device during training\n per_device_eval_batch_size=64, # batch size for evaluation\n warmup_steps=500, # number of warmup steps for learning rate scheduler\n weight_decay=0.01, # strength of weight decay\n logging_dir = \"/home/kemalaraz/Desktop/MovieEntityRecognizer/pretrained_models/\" + model_name + '/logs', # directory for storing logs\n logging_steps=10,\n load_best_model_at_end=True,\n learning_rate = 5e-5,\n seed = 42\n)\n# The high level api of the trainer\ntrainer = Trainer( \n model_init = model_init,\n args = training_args, \n train_dataset = train_dataset, \n eval_dataset = test_dataset,\n compute_metrics = compute_metrics \n)", "_____no_output_____" ], [ "training_results = trainer.train()\nevaluate_results_with_best_epoch = trainer.evaluate()", "_____no_output_____" ], [ "# For basic inference\nmodel = BertForTokenClassification.from_pretrained(path_to_the_model, num_labels=len(label_map))", "_____no_output_____" ], [ "tokens = tokenizer.tokenize(tokenizer.decode(tokenizer.encode(\"list the five star movies starring john lennon\")))\ninputs = tokenizer.encode(\"list the four star movies starring john lennon\", return_tensors=\"pt\")\n\noutputs = model(inputs)[0]\npredictions = torch.argmax(outputs, dim=2)\n\nprint([(token, label_list[prediction]) for token, prediction in zip(tokens, predictions[0].tolist())])", "[('[CLS]', 'O'), ('list', 'O'), ('the', 'O'), ('five', 'B-RATINGS_AVERAGE'), ('star', 'O'), ('movies', 'O'), ('starring', 'O'), ('john', 'B-ACTOR'), ('lennon', 'I-ACTOR'), ('[SEP]', 'O')]\n" ] ], [ [ "## Attachments ", "_____no_output_____" ], [ "![image.png](attachment:image.png)", "_____no_output_____" ], [ "![image.png](attachment:image.png)", "_____no_output_____" ], [ "![image.png](attachment:image.png)", "_____no_output_____" ], [ "![image.png](attachment:image.png)", 
"_____no_output_____" ], [ "![image.png](attachment:image.png)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
4aa429bfd539e7a905f7ca235cbefa859c19c535
155,600
ipynb
Jupyter Notebook
SQL-assignment-on-IMDB-data.ipynb
mayank171986/SQL-Assignment-on-IMDB-data
9ed8d43269286b18f2c95e445ffd80faa7cf6622
[ "MIT" ]
1
2020-05-09T11:49:27.000Z
2020-05-09T11:49:27.000Z
SQL-assignment-on-IMDB-data.ipynb
mayank171986/SQL-Assignment-on-IMDB-data
9ed8d43269286b18f2c95e445ffd80faa7cf6622
[ "MIT" ]
null
null
null
SQL-assignment-on-IMDB-data.ipynb
mayank171986/SQL-Assignment-on-IMDB-data
9ed8d43269286b18f2c95e445ffd80faa7cf6622
[ "MIT" ]
2
2020-12-19T06:07:49.000Z
2021-09-10T16:08:28.000Z
34.631649
408
0.330996
[ [ [ "# Import common packages and create database connection\nimport pandas as pd\nimport sqlite3 as db\n\nconn = db.connect('Db-IMDB.db')", "_____no_output_____" ] ], [ [ "1.List all the directors who directed a 'Comedy' movie in a leap year. (You need to check that the genre is 'Comedy’ and year is a leap year) Your query should return director name, the movie name, and the year.", "_____no_output_____" ] ], [ [ "%%time\n# List all the distinct directors who directed a 'Comedy' movie in a leap year.\n# citation https://stackoverflow.com/questions/6534788/check-for-leap-year\n# https://www.mathsisfun.com/leap-years.html\nresult = pd.read_sql_query(\n '''\n SELECT DISTINCT trim(P.NAME) as director, M.title as movie, M.year, G.Name\n FROM Movie M\n JOIN M_Director MD ON M.MID = MD.MID\n JOIN Person P on trim(MD.PID) = trim(P.PID)\n JOIN M_Genre MG on M.MID = MG.MID\n JOIN Genre G on MG.GID = G.GID\n WHERE G.Name LIKE '%Comedy%'\n AND (((M.year % 4 = 0) AND (M.year % 100 != 0)) OR (M.year % 400 = 0))\n GROUP BY director\n ORDER BY director\n '''\n , conn);\n\nresult", "CPU times: user 26.5 s, sys: 135 ms, total: 26.6 s\nWall time: 31.2 s\n" ], [ "%%time\n# List all the directors who directed a 'Comedy' movie in a leap year. 
A director can direct multiple movies in leap year.\n# citation https://stackoverflow.com/questions/6534788/check-for-leap-year\n# https://www.mathsisfun.com/leap-years.html\nresult = pd.read_sql_query(\n '''\n SELECT DISTINCT trim(P.NAME) as director, M.title as movie, M.year, G.Name\n FROM Movie M\n JOIN M_Director MD ON M.MID = MD.MID\n JOIN Person P on trim(MD.PID) = trim(P.PID)\n JOIN M_Genre MG on M.MID = MG.MID\n JOIN Genre G on MG.GID = G.GID\n WHERE G.Name LIKE '%Comedy%'\n AND (((M.year % 4 = 0) AND (M.year % 100 != 0)) OR (M.year % 400 = 0))\n ORDER BY director\n '''\n , conn);\n\nresult", "CPU times: user 26.6 s, sys: 131 ms, total: 26.7 s\nWall time: 31.4 s\n" ] ], [ [ "2.List the names of all the actors who played in the movie 'Anand' (1971)", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT p.Name FROM Movie m \n JOIN M_Cast mc ON m.MID=mc.MID\n JOIN Person p ON trim(mc.PID)=trim(p.PID)\n WHERE m.title='Anand' AND m.year=1971\n '''\n , conn)\n\nresult", "CPU times: user 212 ms, sys: 8.56 ms, total: 221 ms\nWall time: 222 ms\n" ] ], [ [ "3. List all the actors who acted in a film before 1970 andin a film after 1990. (That is: < 1970 and > 1990.)", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT DISTINCT trim(p.PID) as pid, p.Name\n FROM Movie m\n JOIN M_Cast mc ON m.MID = mc.MID\n JOIN Person p ON trim(mc.PID) = trim(p.PID)\n WHERE m.year > 1990\n AND trim(p.PID) IN (SELECT DISTINCT trim(p.PID) as pid\n FROM Movie m\n JOIN M_Cast mc ON m.MID = mc.MID\n JOIN Person p ON trim(mc.PID) = trim(p.PID)\n WHERE m.year < 1970)\n GROUP BY trim(p.PID)\n ''', conn)\nresult", "CPU times: user 1min 33s, sys: 616 ms, total: 1min 33s\nWall time: 1min 35s\n" ] ], [ [ "4. List all directors who directed 10 movies or more, in descending order of the number of movies they directed. 
Return the directors' names and the number of movies each of them directed.", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT p.Name, count(md.ID) movieCount FROM M_Director md JOIN Person p ON md.PID=p.PID\n GROUP BY md.PID HAVING movieCount >= 10 ORDER BY movieCount DESC\n ''', conn)\n\nresult", "CPU times: user 69.3 ms, sys: 2.19 ms, total: 71.4 ms\nWall time: 73.3 ms\n" ] ], [ [ "5a. For each year, count the number of movies in that year that had only female actors.", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT count(m.year) movie_count, m.year as movie_year\n FROM Movie m where m.MID not in\n (SELECT mc.MID FROM Person p JOIN M_Cast mc ON trim(p.PID)=trim(mc.PID) WHERE p.Gender='Male')\n GROUP BY movie_year\n ORDER BY movie_count DESC\n ''', conn)\nresult", "CPU times: user 4min 31s, sys: 1.39 s, total: 4min 32s\nWall time: 4min 37s\n" ] ], [ [ "5b.Now include a small change: report for each year the percentage of movies in that year with only female actors, and the total number of movies made that year. For example, one answer will be:1990 31.81 13522 meaning that in 1990 there were 13,522 movies, and 31.81% had only female actors. You do not need to round your answer.", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT y.allMov as 'movie_count', x.year as movie_year, ((x.Movies_Cnt*100.0)/y.allMov) as Percent FROM\n (SELECT count(*) Movies_Cnt , m.year\n FROM Movie m where m.MID not in\n (SELECT mc.MID FROM Person p JOIN M_Cast mc ON trim(p.PID) = trim(mc.PID) WHERE p.Gender='Male')\n GROUP BY m.year) x INNER JOIN\n (SELECT count(*) allMov, m.year\n FROM Movie m\n GROUP BY m.year) y on x.year=y.year\n ''', conn)\nresult", "CPU times: user 4min 38s, sys: 1.37 s, total: 4min 39s\nWall time: 4min 50s\n" ] ], [ [ "6. Find the film(s) with the largest cast. Return the movie title and the size of the cast. 
By \"cast size\" we mean the number of distinct actors that played in that movie: if an actor played multiple roles, or if it simply occurs multiple times in casts,we still count her/him only once.", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT count(DISTINCT mc.PId) as cast_count, m.title FROM Movie m\n JOIN M_Cast mc ON m.MID=mc.MID\n JOIN Person p ON trim(mc.PID)=trim(p.PID)\n GROUP BY m.MID\n ORDER BY cast_count DESC limit 1\n ''', conn)\nresult", "CPU times: user 9min 47s, sys: 2.57 s, total: 9min 49s\nWall time: 9min 58s\n" ] ], [ [ "7. A decade is a sequence of 10 consecutive years. For example,say in your database you have movie information starting from 1965. Then the first decade is 1965, 1966, ..., 1974; the second one is 1967, 1968, ..., 1976 and so on. Find the decade D with the largest number of films and the total number of films in D.", "_____no_output_____" ] ], [ [ "%%time\n# citation https://stackoverflow.com/questions/25955049/sql-how-to-sum-up-count-for-many-decades?rq=1\n# result = pd.read_sql_query(\n# '''\n# SELECT (ROUND(m.year / 10) * 10) AS Decade, COUNT(1) AS total_movies\n# FROM Movie m\n# GROUP BY ROUND(m.year/ 10)\n# ORDER BY total_movies DESC LIMIT 1\n# ''', conn)\n# result\n\nresult = pd.read_sql_query('''\n SELECT d_year.year AS start, d_year.year+9 AS end, count(1) AS total_movies FROM\n (SELECT DISTINCT(year) FROM Movie) d_year\n JOIN Movie m WHERE m.year>=start AND m.year<=end\n GROUP BY end\n ORDER BY total_movies DESC\n LIMIT 1\n ''', conn)\nresult", "CPU times: user 47.7 ms, sys: 34.3 ms, total: 81.9 ms\nWall time: 99 ms\n" ] ], [ [ "8. Find the actors that were never unemployed for more than 3 years at a stretch. 
(Assume that the actors remain unemployed between two consecutive movies).", "_____no_output_____" ] ], [ [ "%%time\n# citation https://stackoverflow.com/questions/57733454/to-find-actors-who-were-never-unemployed-for-more-than-3-years-in-a-stretch\n# Here I am using window function (LEAD) that provides comparing current row with next row\nresult = pd.read_sql_query(\n '''\n SELECT *, (next_year - year) AS gap FROM (SELECT *\n , LEAD(year, 1, 0) OVER (PARTITION BY Name ORDER BY year ASC) AS next_year\n FROM (SELECT p.Name, m.title, m.year FROM Movie m\n JOIN M_Cast mc ON m.MID=mc.MID\n JOIN Person p ON trim(mc.PID)=trim(p.PID)))\n WHERE gap <=3 and gap >=0\n GROUP BY Name\n ORDER BY Name ASC\n ''', conn)\nresult", "CPU times: user 12min 30s, sys: 2.87 s, total: 12min 33s\nWall time: 13min 13s\n" ] ], [ [ "9. Find all the actors that made more movies with Yash Chopra than any other director.", "_____no_output_____" ] ], [ [ "# %%time\n# The following query is correct but didn't give the results, \n# Running below query gives \"database or disk is full\" error\n# result = pd.read_sql_query(\n# '''\n# SELECT P1.PID, P1.Name, count(Movie.MID) AS movies_with_yc from Person as P1\n# JOIN M_Cast\n# JOIN Movie\n# JOIN M_Director ON (trim(Movie.MID) = trim(M_Director.MID))\n# JOIN Person as P2 ON (trim(M_Director.PID) = trim(P2.PID)) where P2.Name = 'Yash Chopra'\n# GROUP BY P1.PID HAVING count(Movie.MID) >\n# (\n# SELECT count(Movie.MID) FROM Person AS P3\n# JOIN M_Cast\n# JOIN Movie\n# JOIN M_Director ON (trim(Movie.MID) = trim(M_Director.MID))\n# JOIN Person AS P4 ON (trim(M_Director.PID) = trim(P4.PID))\n# WHERE P1.PID = P3.PID AND P4.Name != 'Yash Chopra'\n# GROUP BY P4.PID\n# )\n# ORDER BY movies_with_yc DESC;\n# ''', conn)\n# result", "_____no_output_____" ], [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT Director, Actor, Count(1) AS Movies_with_YashChopra\n FROM\n (\n SELECT p.Name AS Director, m.title AS Movie\n FROM Person p\n JOIN M_Director md ON 
trim(md.PID)=trim(p.PID)\n JOIN Movie m ON trim(md.MID)=m.MID and p.Name LIKE 'Yash%'\n GROUP BY p.Name, m.title\n ) t1\n JOIN\n (\n SELECT p.Name AS Actor, m.title AS Movie\n FROM Person p\n JOIN M_Cast mc ON trim(mc.PID)=trim(p.PID)\n JOIN Movie m ON trim(mc.MID)=m.MID\n GROUP BY p.Name, m.title\n ) t2\n ON t1.Movie=t2.Movie\n GROUP BY t1.Director, t2.Actor\n ORDER By Movies_with_YashChopra DESC\n ''', conn)\n\nresult", "CPU times: user 8min 5s, sys: 37.9 s, total: 8min 43s\nWall time: 9min 24s\n" ] ], [ [ "10. The Shahrukh number of an actor is the length of the shortest path between the actor and Shahrukh Khan in the \"co-acting\" graph. That is, Shahrukh Khan has Shahrukh number 0; all actors who acted in the same film as Shahrukh have Shahrukh number 1; all actors who acted in the same film as some actor with Shahrukh number 1 have Shahrukh number 2, etc. Return all actors whoseShahrukh number is 2", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query(\n '''\n SELECT Name FROM Person WHERE trim(Name) LIKE '%shah rukh khan%'\n ''', conn)\n\nresult", "CPU times: user 20.2 ms, sys: 12.4 ms, total: 32.6 ms\nWall time: 50.4 ms\n" ] ], [ [ "<h2>Using below steps we can get Shah Rukh Khan 2nd Degree Connection</h2>", "_____no_output_____" ], [ "- Logic to Build following Query\n- Select movies in which Shah Rukh Khan worked\n- Select Shah Rukh level 1 i.e. 
1st Degree connection of Shah Rukh Khan\n- Select movies in which Shah Rukh level 1 worked but exclude movies with Shah Rukh Khan \n- Select Shah Rukh level 2 who worked in some movie with Shah Rukh level 1", "_____no_output_____" ] ], [ [ "%%time\nresult = pd.read_sql_query('''\n SELECT DISTINCT P.Name\n FROM Person p\n JOIN M_Cast mc\n ON trim(p.PID) = trim(mc.PID)\n WHERE mc.MID IN (SELECT mc.MID\n FROM M_Cast mc\n WHERE trim(mc.PID) IN (\n SELECT trim(p.PID) as pid\n FROM Person p\n JOIN M_Cast mc\n ON trim(p.PID) = trim(mc.PID)\n WHERE mc.MID IN (\n SELECT mc.MID\n FROM Person p\n JOIN M_Cast mc\n ON trim(p.PID) = trim(mc.PID)\n WHERE trim(p.Name) LIKE '%shah rukh khan%'\n )\n AND trim(p.Name) NOT LIKE '%shah rukh khan%'\n )\n AND mc.MID NOT IN (SELECT mc.MID\n FROM Person p\n JOIN M_Cast mc\n ON trim(p.PID) = trim(mc.PID)\n WHERE trim(p.Name) LIKE '%shah rukh khan%'))\n \n ''', conn)\n\nresult", "CPU times: user 17min 18s, sys: 37.5 s, total: 17min 55s\nWall time: 18min 20s\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ] ]
4aa4301c03aa5eb80eabd3451c6717d8006f0b96
695,501
ipynb
Jupyter Notebook
homework/14 Homework_Melissa.ipynb
melissa0520/ML100Days
36c06db5f4a3a904744a9a766847041b5322fed3
[ "CC0-1.0" ]
null
null
null
homework/14 Homework_Melissa.ipynb
melissa0520/ML100Days
36c06db5f4a3a904744a9a766847041b5322fed3
[ "CC0-1.0" ]
null
null
null
homework/14 Homework_Melissa.ipynb
melissa0520/ML100Days
36c06db5f4a3a904744a9a766847041b5322fed3
[ "CC0-1.0" ]
null
null
null
626.577477
637,759
0.657168
[ [ [ "# [作業目標]\n\n\n1. [簡答題] 比較下列兩個讀入的 df 有什麼不同?為什麼造成的?\n\n2. 請將 Dcard API 取得所有的看板資訊轉換成 DataFrame,並且依照熱門程度排序後存成一個 csv 的檔案。\n\n\n", "_____no_output_____" ], [ "# 作業 ", "_____no_output_____" ] ], [ [ "# 記得先 Import 正確的套件\n\nimport pandas as pd\nimport numpy as np", "_____no_output_____" ] ], [ [ "### 1. [簡答題] 比較下列兩個讀入的 df 有什麼不同?為什麼造成的?\n", "_____no_output_____" ] ], [ [ "df1 = pd.read_csv('https://raw.githubusercontent.com/dataoptimal/posts/master/data%20cleaning%20with%20python%20and%20pandas/property%20data.csv')\ndf1", "_____no_output_____" ], [ "df2 = pd.read_csv(\n 'https://raw.githubusercontent.com/dataoptimal/posts/master/data%20cleaning%20with%20python%20and%20pandas/property%20data.csv',\n keep_default_na=True,\n na_values=['na', '--']\n)\ndf2", "_____no_output_____" ] ], [ [ "#### 前者不會將缺值補上資料,後者有定義將缺值補為NaN", "_____no_output_____" ], [ "### 2. 請將 Dcard API 取得所有的看板資訊轉換成 DataFrame,並且依照熱門程度排序後存成一個 csv 的檔案。\n", "_____no_output_____" ] ], [ [ "import requests\nr = requests.get('https://www.dcard.tw/_api/forums')\nresponse = r.text\n\nimport json\ndata = json.loads(response)\n\nprint(data)\n", "[{'id': '7f125e07-4460-4ea5-80b5-33f0e9aafa0c', 'alias': 'midnightlab', 'name': '午夜實驗室', 'description': '午夜實驗室10/6、10/7即將在華山登場!這裏提供大家交流活動資訊與討論,請大家要遵守 Dcard 板規喔!', 'subscriptionCount': 1725, 'subscribed': False, 'read': False, 'createdAt': '2016-05-14T19:15:15.698Z', 'updatedAt': '2021-04-20T08:36:58.712Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['午夜實驗室'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 
'enablePrivateMessage': False}, {'id': 'c0ed3f99-ed1c-49a8-b413-ed5e925aafe4', 'alias': 'timecapsule', 'name': '時光膠囊', 'description': 'Dcard 九週年最終章!告別動盪不安的 2020,說好了,2021 我們一起活得更努力!請寫一封信給 2021 的自己,你想要成為什麼樣的人、想達成的新年目標或行動計畫?這些願望,Dcard 陪你一起完成。許願的卡友們一年後都會收到「時光膠囊」,提醒你檢視「和 Dcard 的約定」完成了多少。在文章裡留下一句想隨身提醒自己的話,就有機會訂製你專屬的 D 卡。', 'subscriptionCount': 4290, 'subscribed': False, 'read': False, 'createdAt': '2016-05-14T20:15:15.698Z', 'updatedAt': '2021-01-07T07:24:57.455Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9ab1db4e-8beb-4072-be8a-d067a0a02f92/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/18ce292e-934e-41d0-8936-11433faf56be/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 300}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cc2164ae-88cd-466d-b11b-19e4b1f6f78c', 'alias': 'mother', 'name': '母親節', 'description': 'Dcard 母親節限定板,告訴媽媽我愛妳!', 'subscriptionCount': 370, 'subscribed': False, 'read': False, 'createdAt': '2016-05-14T22:15:15.699Z', 'updatedAt': '2021-04-20T08:36:35.702Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['我是媽寶我驕傲', '曬媽媽', '母親節蛋糕', '康乃馨', 
'母親節花束', '我愛媽媽', '母親節禮物', '媽媽我愛你', '媽媽', '母親節大餐', '母親節檔期'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '11e8d941-0aad-4960-8348-b27c36270fd1', 'alias': 'merryxmas', 'name': '聖誕CiaoCiao', 'description': '期間限定開啟!只要是和聖誕節有關的事情,或是和「禮物」有關的東西通通都來分享吧!', 'subscriptionCount': 16984, 'subscribed': False, 'read': False, 'createdAt': '2016-05-17T00:11:15.742Z', 'updatedAt': '2021-04-20T08:36:54.019Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['Dcard聖誕列車', '交換禮物', '聖誕禮物', '聖誕卡片', '聖誕舞會', '聖誕樹', '聖誕節'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7b5b4c6c-ee5b-4903-a27f-e772de58f078/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1d36a792-6e4a-40be-8656-89a0b5973f6c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8d5cabde-d0df-479f-8ab7-153209317537', 'alias': 'father', 'name': '父親節', 'description': 'Dcard 父親節特輯,上傳爸爸帥照,寫下真摯文字,一起謝謝我們心中的 「鋼鐵人老爸」。', 'subscriptionCount': 367, 'subscribed': False, 'read': False, 'createdAt': '2016-05-17T00:12:15.742Z', 'updatedAt': '2017-01-01T10:36:38.235Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 
'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 0}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4dd9f69b-9b33-4ba0-9cec-887d8c5091e4', 'alias': 'bugreport', 'name': '臨時回報', 'description': '臨時回報版本問題', 'subscriptionCount': 17, 'subscribed': False, 'read': False, 'createdAt': '2016-05-18T07:20:35.140Z', 'updatedAt': '2017-06-18T03:31:45.331Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '460a5276-0480-4af1-86df-14b3040cab08', 'alias': '5anniversary', 'name': '五週年紀念', 'description': '五週年徵文活動限定板,僅供活動徵文、討論,發文前請先閱讀置頂喔!', 'subscriptionCount': 5415, 'subscribed': False, 'read': False, 'createdAt': '2016-05-18T07:20:35.140Z', 'updatedAt': '2018-04-18T07:16:27.550Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 0}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'3c91f2b0-6374-4467-b557-2e5383680afb', 'alias': 'ntu', 'name': '臺灣大學', 'description': '臺灣大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 22973, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T08:15:15.752Z', 'updatedAt': '2021-06-21T10:14:52.134Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cf164bf7-5918-4b53-956e-93632d033b14/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9808ed78-e150-47bb-847f-70eab4b55470/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 583}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'caf9487a-1994-494d-81f2-7950438e7bef', 'alias': 'nccu', 'name': '政治大學', 'description': '政治大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 16191, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T09:15:15.753Z', 'updatedAt': '2020-08-13T08:42:34.572Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 
'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c80ae1a6-45d0-44e7-b8e5-539cd3a88043/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0af62c52-7fdd-4055-9f4c-c91b00ea856a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 850}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '204997a1-c63d-4dc4-965e-9f018e452d1d', 'alias': 'nctu', 'name': '交通大學', 'description': '交通大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 9748, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T10:15:15.755Z', 'updatedAt': '2020-08-13T08:52:46.478Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/37208085-bffc-4a75-a17c-7a2b358515ef/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b6ef12ed-e127-41db-a3cd-4f9a518eaefa/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 241}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5120400e-e5e6-4676-a1b8-75a13625ecea', 'alias': 'tku', 'name': '淡江大學', 'description': '淡江大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 20489, 'subscribed': False, 'read': 
False, 'createdAt': '2016-05-19T11:15:15.756Z', 'updatedAt': '2020-08-13T08:54:08.205Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bc6f4ea2-8698-4f85-8656-3ca6a93e0bea/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/176586b3-0d17-4e5f-89c9-e3c2e0d52ea2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 957}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a8a2712f-80de-40c6-971f-c7bbb77716fe', 'alias': 'ndhu', 'name': '東華大學', 'description': '東華大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 8017, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T12:15:15.758Z', 'updatedAt': '2020-08-13T08:49:53.852Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1517a9b4-f01c-4cc7-b2bb-3224f87cddf7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 
'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/32a86221-9082-4dfe-9c3d-27125e361df3/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 216}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'abc4fa45-a456-49c6-8948-8981ad0b0f97', 'alias': 'ncku', 'name': '成功大學', 'description': '成功大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 21045, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T13:15:15.759Z', 'updatedAt': '2020-08-13T08:49:47.456Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/313d8179-6adb-4d87-a0fc-8c97fd9f14bf/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7812ed61-f3ff-4e62-bbb4-85489610d513/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 985}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b6634fbb-b96a-41c6-82e1-80c18ed23dec', 'alias': 'ntut', 'name': '北科大', 'description': '北科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 8424, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T14:15:15.761Z', 'updatedAt': '2020-08-13T08:46:41.566Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 
'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ad6ab485-9ad7-4872-84b0-00a967940d1b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/dc2aafc4-9004-455d-ba1f-a6eb5a6375c6/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 150}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a62a724b-4fdd-4044-b6d3-19483aa7f0df', 'alias': 'fju', 'name': '輔仁大學', 'description': '輔仁大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 19404, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T15:15:15.762Z', 'updatedAt': '2020-08-13T08:55:07.706Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/aeb20423-4edc-4832-b0f4-4d16236ee491/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/65316159-38dd-4fc4-bac8-03833d2530fb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 631}, 'favorite': False, 'enablePrivateMessage': 
False}, {'id': '934ab7f8-a550-4b6a-b97c-44fc63a3345e', 'alias': 'ntnu', 'name': '師範大學', 'description': '師範大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 8649, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T16:15:15.764Z', 'updatedAt': '2020-08-13T08:53:04.527Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7d5039ea-54c1-4515-96ce-6d4bfcb2aa96/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6d409cfc-8310-4975-a38e-389940f1ab88/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 133}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd2122f4e-d1b2-423a-8f68-1342906df9e9', 'alias': 'nknu', 'name': '高雄師範大學', 'description': '高雄師範大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3528, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T17:15:15.766Z', 'updatedAt': '2020-08-13T08:43:01.017Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': 
False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c855878f-7855-451e-8db4-c69ce127d989/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/73e7a308-c8a5-439a-978d-a58b4b08dc8c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 61}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '51a48caf-bfd4-4c12-979e-c55962856a81', 'alias': 'ncue', 'name': '彰化師範大學', 'description': '彰化師範大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 5400, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T18:15:15.767Z', 'updatedAt': '2020-08-13T08:58:13.685Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/326bf514-f456-4c58-96aa-a311761862c9/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/12e4f148-af86-440e-b2f0-4e7cb975fdd5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 106}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '89e7bb6b-e15f-4168-add7-0b498e335ce7', 'alias': 'ncu', 'name': '中央大學', 'description': '中央大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 10433, 'subscribed': False, 
'read': False, 'createdAt': '2016-05-19T19:15:15.769Z', 'updatedAt': '2020-08-13T08:42:57.487Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c477af20-4885-4183-898f-fef172a902da/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/61e8d725-fd66-4dc0-98df-e4b3d02cfbaf/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 217}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6f72a221-b46c-4f1e-9d82-26378d407608', 'alias': 'fcu', 'name': '逢甲大學', 'description': '逢甲大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 19104, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T20:15:15.770Z', 'updatedAt': '2020-08-13T08:41:44.996Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/843cb1d1-5a08-4cae-9361-283b8e95c8d5/orig.jpeg', 'type': 'image/jpeg', 'width': 
1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c161ea54-618a-49eb-9710-fefffc7f6c33/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 398}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e45851be-ad1b-4580-aa92-fe3e6e6fcd7e', 'alias': 'nthu', 'name': '清華大學', 'description': '清華大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 11562, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T21:15:15.772Z', 'updatedAt': '2020-08-13T08:50:36.519Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/124c6e6e-7513-4454-9f1d-eef5cff943b4/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2ca1c8d0-d620-418b-8f03-6a21223d6963/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 460}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '921374a2-1d49-491f-968f-87f187d6928e', 'alias': 'ncnu', 'name': '暨南大學', 'description': '暨南大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4417, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T22:15:15.773Z', 'updatedAt': '2020-08-13T08:46:22.251Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 
'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/27a6693a-41a3-426a-94ce-b8e835a2bd80/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a915dad1-a1cd-4a35-8198-bab82ed124f4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 143}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '91d20ae5-be49-419a-bd8b-457de2064aa3', 'alias': 'thu', 'name': '東海大學', 'description': '東海大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 14716, 'subscribed': False, 'read': False, 'createdAt': '2016-05-19T23:15:15.775Z', 'updatedAt': '2020-08-13T08:44:06.004Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6749fe85-9287-4cd2-8652-a7e57028fa83/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3d3e6f7a-fc87-4ba8-82b8-a909078b4f5f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 557}, 'favorite': False, 
'enablePrivateMessage': False}, {'id': 'd477f90b-56e4-423d-8686-3524dc331bd5', 'alias': 'nkfust', 'name': '高雄第一科大', 'description': '高雄第一科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7963, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T00:15:15.776Z', 'updatedAt': '2020-08-13T08:50:03.164Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1a31e4b4-3daa-46e7-addd-562d1ee970e5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b67f780a-a98d-4f0d-ad79-71563a0d28fe/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 91}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7e1a5d67-6085-491b-987a-e8ab6ddbd3ce', 'alias': 'ntust', 'name': '台科大', 'description': '台科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 8320, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T01:15:15.778Z', 'updatedAt': '2020-08-13T08:50:44.027Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 
'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/80ba5305-f90f-4efa-8f5c-ad9837eb5e8f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a3120743-b923-4ade-8417-5ed491d80471/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 135}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9bfaab73-e0d2-4e73-ab82-101ab66dd2d4', 'alias': 'nchu', 'name': '中興大學', 'description': '中興大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 11513, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T02:15:15.779Z', 'updatedAt': '2020-08-13T08:52:42.855Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bee7a5fb-f4c6-48ae-bfe1-6ace87dbc1dc/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/51f16f78-0cb8-4a15-84f4-67bfb4ec58f7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 572}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5e7919b5-805c-41c7-ad27-332105f9c614', 'alias': 'yzu', 'name': '元智大學', 'description': '元智大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6552, 
'subscribed': False, 'read': False, 'createdAt': '2016-05-20T03:15:15.781Z', 'updatedAt': '2020-08-13T09:00:29.646Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0f91df24-f7c0-4c92-8b83-8d962e4090e3/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8a7ee551-2866-4719-8444-ae7bd322c9fd/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 216}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8b977798-d2b7-4c58-a21f-43c50e802bcf', 'alias': 'nuu', 'name': '聯合大學', 'description': '聯合大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 5558, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T04:15:15.782Z', 'updatedAt': '2020-08-13T08:56:09.850Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b2e09129-2e82-4bbb-ae6f-df410f8702b1/orig.jpeg', 'type': 
'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/72a16ede-18a5-4129-8c52-d12bde245a51/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 206}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2cc8a947-691c-43c5-9f22-f323d1be75df', 'alias': 'ntou', 'name': '海洋大學', 'description': '海洋大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6370, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T05:15:15.784Z', 'updatedAt': '2020-08-13T08:50:40.026Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e1652401-25ed-496d-89ca-2e0e0611564e/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d4e57420-b185-4d51-81e8-bcd987851012/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 176}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'eb59cc29-b856-4e45-8ff8-4936aeefc122', 'alias': 'nsysu', 'name': '中山大學', 'description': '中山大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7028, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T06:15:15.785Z', 'updatedAt': '2020-08-13T08:46:29.870Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 
'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fa56c166-40ad-4b7b-be89-c89f3bf70d9c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/13a7a3c2-40a8-4b57-b230-3035ecff7a96/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 247}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0b8a02a7-98c1-4da9-8c15-b3dea327ec65', 'alias': 'ntpu', 'name': '臺北大學', 'description': '臺北大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7578, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T07:15:15.786Z', 'updatedAt': '2020-08-13T08:43:09.300Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f8335431-74ef-49cb-94cd-b44df550774f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/291d6a9e-69ce-4243-a31a-e60b45184e07/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 321}, 'favorite': 
False, 'enablePrivateMessage': False}, {'id': 'a69444ff-19f2-4ac4-b5d8-674a18a3b56b', 'alias': 'cpu', 'name': '中央警察大學', 'description': '中央警察大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6093, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T08:15:15.788Z', 'updatedAt': '2020-08-13T08:54:57.528Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1cbcfc71-e6e9-419f-a157-ee676e11499c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0872cd7b-d91c-44a0-a313-062ec88c29b7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 38}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2883c40b-552b-4b86-87ac-8c3209b88a17', 'alias': 'ccu', 'name': '中正大學', 'description': '中正大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 9376, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T09:15:15.789Z', 'updatedAt': '2020-08-13T08:54:29.685Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': 
['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4194bf5b-1fe2-46a2-85a4-8c3fe59212c6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ab892df7-78fc-475a-8959-68e8dcfa2f7c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 351}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2d5b4fc9-3eaa-4a8f-9dab-c9a835c2d5fa', 'alias': 'usc', 'name': '實踐大學', 'description': '實踐大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7878, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T10:15:15.790Z', 'updatedAt': '2020-08-13T09:00:11.117Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/02051c8f-af89-4f73-a044-fce6d518bed3/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/27c8e3ef-ae4c-485e-aa89-32b3f8783cd9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 349}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '04831d50-5849-4f3c-827c-c9c82dc71d03', 'alias': 'ym', 'name': '陽明大學', 'description': '陽明大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 
2416, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T11:15:15.792Z', 'updatedAt': '2020-08-13T09:00:26.502Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/59b30df7-24ef-4e4b-b42d-bb793b4e0509/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ce198f79-a4e3-4350-a485-ce9e9154ee69/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5398dfd6-1a81-4a92-ac8b-e79f2b2093cb', 'alias': 'ntua', 'name': '臺灣藝術大學', 'description': '臺灣藝術大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4305, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T12:15:15.794Z', 'updatedAt': '2020-08-13T08:53:08.230Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/56898242-de5e-4348-82b1-56fbcd1d4b38/orig.jpeg', 
'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/784712bd-e3ae-45b2-a30a-846d78d59e27/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 44}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '34a50714-d3df-4fc3-9ff0-0d836f35088c', 'alias': 'shu', 'name': '世新大學', 'description': '世新大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 9621, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T13:15:15.795Z', 'updatedAt': '2020-08-13T08:43:15.836Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2d07a127-e849-4925-a12d-93939a1ffc86/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a93e0384-649a-44f0-a732-3e487094d121/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 578}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0bd78baa-4dde-49da-b3ec-edf066826f81', 'alias': 'ntue', 'name': '臺北教育大學', 'description': '臺北教育大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2872, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T14:15:15.797Z', 'updatedAt': '2020-08-13T08:53:17.574Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': 
True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/53b73f5f-3488-476e-9436-e27135bf4bc8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c5993984-77ff-4265-bedb-1214e90ff7eb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 59}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7553dd7b-9607-4629-a2b4-d52d0450b059', 'alias': 'tnua', 'name': '臺北藝術大學', 'description': '臺北藝術大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3226, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T15:15:15.798Z', 'updatedAt': '2020-08-13T08:56:45.666Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d8586ef9-be83-46c6-be1f-e91e0249e9ee/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b35838af-7475-46b0-93b2-f358c35daa53/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 122}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': '884ae591-32e8-466c-ad94-784931c590e1', 'alias': 'utaipei', 'name': '臺北市立大學', 'description': '臺北市立大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4135, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T16:15:15.800Z', 'updatedAt': '2020-08-13T08:51:31.346Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ad7c72f9-7a99-45ab-8294-57b0637654a1/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/141f4076-8a6a-4e04-9679-a0d450938dbf/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 58}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c9227e8f-935a-4d53-838f-2449ae7d718f', 'alias': 'pccu', 'name': '文化大學', 'description': '文化大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 16085, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T17:15:15.801Z', 'updatedAt': '2020-08-13T08:50:58.970Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 
'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d10df3bd-64eb-4bbf-a727-5f369d110dfe/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9a0810c9-f9e4-4e98-a1b4-5321c05a9ea2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 916}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '631f9943-9913-4e65-abaa-d6228a967329', 'alias': 'scu', 'name': '東吳大學', 'description': '東吳大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 10367, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T18:15:15.803Z', 'updatedAt': '2020-08-13T08:56:16.302Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ab809c1f-e6cf-437c-b40e-ca1d65bd8883/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a231c469-e6d8-4c05-bbf3-847134c71faf/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 504}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'eb2908f1-803b-4b4e-ba6a-fc2edaec221b', 'alias': 'ncyu', 'name': '嘉義大學', 'description': '嘉義大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 
'subscriptionCount': 6435, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T19:15:15.804Z', 'updatedAt': '2020-08-13T08:59:09.863Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9e688bcd-5c03-4e67-9922-3c0b9f362714/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7790a600-068d-4dea-8eee-e97aee26185e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 146}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9f8bad92-ec3e-4383-ac9a-5c0ee6c5f233', 'alias': 'nuk', 'name': '高雄大學', 'description': '高雄大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4996, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T20:15:15.805Z', 'updatedAt': '2020-08-13T08:53:45.437Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/9dbb13a8-0957-454f-bfa7-066bde4e64b0/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b39cd11f-f30a-4670-b057-67d5990218a4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 115}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2a18cc3e-1178-490d-9db0-53ad81829226', 'alias': 'nutc', 'name': '中科大', 'description': '中科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 11763, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T21:15:15.807Z', 'updatedAt': '2020-08-13T08:47:00.905Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4be112fb-3bf0-47f3-b4e1-90d0d063c668/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/78c7bf25-ad05-4aeb-8a53-7802487c6679/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 198}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '35118bd3-e43f-48df-97d4-9fcafb27930a', 'alias': 'ntunhs', 'name': '國北護', 'description': '國北護板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4772, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T22:15:15.808Z', 'updatedAt': '2021-06-28T11:46:05.677Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ec23ae7b-387c-44c1-acd8-b9549128a6fe/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f2a4eb79-7838-453e-9c40-460648d7e3f5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 78}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'fce8f0cc-cba2-4317-81e4-7ffb0951a892', 'alias': 'isu', 'name': '義守大學', 'description': '義守大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 9967, 'subscribed': False, 'read': False, 'createdAt': '2016-05-20T23:15:15.810Z', 'updatedAt': '2020-08-13T08:55:23.192Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/12f82ff1-7d98-414f-84c4-95ad3c62b805/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f6a57883-00c1-41a9-aea3-d79bd9219152/orig.jpeg', 'type': 
'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 286}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e3d1e6b1-d675-4827-863d-7565701fea9d', 'alias': 'cgu', 'name': '長庚大學', 'description': '長庚大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7329, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T00:15:15.811Z', 'updatedAt': '2020-08-13T08:47:43.236Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/114b05c6-3d70-4b8f-ad97-1490451df1d2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0ef60da5-1c27-4f4a-a5ba-d1a9bd2e1e97/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 82}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9063d7d5-26c0-4000-8d90-ee84ac3d3bc0', 'alias': 'tmu', 'name': '臺北醫學大學', 'description': '臺北醫學大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4373, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T01:15:15.812Z', 'updatedAt': '2020-08-13T09:00:04.412Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 
'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ab33366a-9cc1-4eb0-bb5f-2e2a89e3f35b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a82a3a4a-36c8-40d6-b850-62855ee74765/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 151}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b6c59786-1946-40c3-9edf-04eb7550bf9f', 'alias': 'cycu', 'name': '中原大學', 'description': '中原大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 15783, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T02:15:15.814Z', 'updatedAt': '2020-08-13T08:48:49.362Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e133e9f1-bc91-4707-ab8a-37060e262bc0/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e9d1ccd8-77ea-4ad8-8a5a-414a3d95fa4f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 433}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ded4b0a1-cdf3-446f-ba57-0b1a29f30132', 'alias': 'csmu', 'name': '中山醫學大學', 'description': 
'中山醫學大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6031, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T03:15:15.815Z', 'updatedAt': '2020-08-13T08:41:30.178Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1461e8ea-aaa0-43e9-889c-abfac2d9ff8e/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/065a0cbb-6446-4b8f-80b8-0b788e72ea74/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 91}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'bb3a7b49-d39a-4408-ab98-9a21d669de19', 'alias': 'wzu', 'name': '文藻外語大學', 'description': '文藻外語大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 8336, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T04:15:15.816Z', 'updatedAt': '2020-08-13T08:44:20.475Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 
'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a83cec65-d8a5-455c-b13d-f2bde3923a12/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3347fbd9-204f-42e5-890d-1b00b2b9d278/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 241}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2876676a-6230-42d7-b543-94da1c79d8dd', 'alias': 'kuas', 'name': '高應大', 'description': '高應大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7307, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T05:15:15.818Z', 'updatedAt': '2020-08-13T08:41:59.717Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/149d27e0-cf4e-44a8-944a-b3aab22789fb/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fbbf3eeb-2bd1-4496-acad-07cf4ca0f0ab/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 46}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0c371cbf-9447-4e80-a8cf-3a742a59566f', 'alias': 'cgust', 'name': '長庚科大', 'description': '長庚科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6935, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T06:15:15.819Z', 'updatedAt': '2020-08-13T08:57:07.458Z', 'canPost': 
False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/806d1fb7-1815-4f21-b2bc-57ddd16feba7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7b34e031-d948-4720-b56d-ac47fc767fac/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 101}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5356b09c-7622-4b58-b010-fbbe9e329536', 'alias': 'ncut', 'name': '勤益科大', 'description': '勤益科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7766, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T07:15:15.821Z', 'updatedAt': '2020-08-13T08:58:51.840Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/da84c03a-6a85-4755-b290-7a525e4f0e1c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/52253565-bbd2-46bc-b193-b7d615d8b355/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 128}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0c6831a9-5838-4ad0-8240-adedd3f326cc', 'alias': 'mcu', 'name': '銘傳大學', 'description': '銘傳大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 13421, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T08:15:15.822Z', 'updatedAt': '2020-08-13T08:58:06.851Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3e45705e-9f43-4c57-a26f-1f978367c3f2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/149e557a-be2f-4c67-9ef4-7260457418c6/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 386}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b7be8c6d-56f5-43d4-ab9a-b4e86f255340', 'alias': 'niu', 'name': '宜蘭大學', 'description': '宜蘭大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4601, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T09:15:15.823Z', 'updatedAt': '2020-08-13T08:59:17.590Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 
'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/aeb5ab64-d166-46c7-83f2-2d35a989aadd/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b15c5d5e-b2ff-4e59-a3ca-4ecdd7fde41a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 93}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7135ff63-25da-439e-bf38-8b3a3743e3ef', 'alias': 'nfu', 'name': '虎尾科大', 'description': '虎尾科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 9250, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T10:15:15.825Z', 'updatedAt': '2020-08-13T08:50:00.026Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a8cf95cb-62a7-4474-adaa-447d5eef2104/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/215678b4-b434-4d9b-b396-5b459d191cc5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 189}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'7c900bb8-3776-409a-b75b-1931c10770c1', 'alias': 'nhcue', 'name': '新竹教育大學', 'description': '新竹教育大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1088, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T11:15:15.826Z', 'updatedAt': '2020-08-13T08:59:13.743Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/dd9ba311-0f04-424d-b820-6f68904c8df7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c2cf4d28-d0a4-4c34-b601-5a30261a0eed/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e127792c-f5a8-42a4-b519-ba7895df3121', 'alias': 'ttu', 'name': '大同大學', 'description': '大同大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2129, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T12:15:15.827Z', 'updatedAt': '2020-08-13T08:29:32.151Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 
'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0d007ece-1b64-45d9-bcf2-5cf5955e896f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/23fafcbe-35de-4a6f-bebe-4dc5ec95f36d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 77}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ebae364b-05d0-4033-8b0f-a5f4132d6942', 'alias': 'ntub', 'name': '臺北商業大學', 'description': '臺北商業大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 5724, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T13:15:15.828Z', 'updatedAt': '2020-08-13T08:53:13.038Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/838df450-3b65-4c7f-83e1-acde5b0290a5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a3e08e5f-e219-431d-aedc-3315b7bd39bb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 74}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'decd5c39-1fe4-43ff-82ea-d9ddda0f30ce', 'alias': 'ntcu', 'name': '臺中教育大學', 'description': '臺中教育大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3565, 'subscribed': False, 
'read': False, 'createdAt': '2016-05-21T14:15:15.830Z', 'updatedAt': '2020-08-13T08:50:33.389Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9bb8785b-356a-41b5-b3de-d08bda099912/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bc459062-2f97-4f5e-a7ad-f72e8a98b8c8/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 68}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ddc537be-fa2e-4b88-833d-925851f1b8a7', 'alias': 'nptu', 'name': '屏東大學', 'description': '屏東大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7598, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T15:15:15.831Z', 'updatedAt': '2020-08-13T08:56:01.277Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ce49bd50-ccf8-4415-acd5-1f0305dae7f0/orig.jpeg', 'type': 'image/jpeg', 'width': 
1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ad47952f-c1f7-4a35-bdd8-7066e4d476d3/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 232}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a46bd68f-a3f0-4d17-a0f8-6033a3a879b0', 'alias': 'pu', 'name': '靜宜大學', 'description': '靜宜大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 11953, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T16:15:15.832Z', 'updatedAt': '2021-07-13T07:12:26.873Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a157259d-ced5-450b-80fc-91896d898ab9/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/dc54b689-7848-43eb-aa29-5f580548e33d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 323}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '3147669e-0cf4-48b2-b62b-cf0e6531de6e', 'alias': 'nkmu', 'name': '高雄海洋科大', 'description': '高雄海洋科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 5851, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T17:15:15.834Z', 'updatedAt': '2020-08-13T08:59:28.311Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 
'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/67014a95-619b-4481-91a2-1f57befb55cb/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b3ad18ba-62a4-4d7a-b881-75aafca262a1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 34}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1c885b79-f6f0-4f90-bfbc-ac89b3a14d70', 'alias': 'cyut', 'name': '朝陽科大', 'description': '朝陽科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 13624, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T18:15:15.835Z', 'updatedAt': '2021-07-13T07:12:23.967Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/28107372-ed31-4401-8e31-88b320e0ae08/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e70eef90-e16f-485a-9502-bd81f9592c5c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 281}, 'favorite': False, 
'enablePrivateMessage': True}, {'id': 'e7b51b7e-f869-4301-847b-df4c2783554d', 'alias': 'ndu', 'name': '國防大學', 'description': '國防大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1901, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T19:15:15.837Z', 'updatedAt': '2020-08-13T08:46:26.236Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a217fe6a-e32f-4f1c-916f-9e0499b5306d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/112e2467-7a10-4dae-b783-2edcee7dc0c7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 25}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '05082cf0-0584-44d7-be90-294a561f3039', 'alias': 'stust', 'name': '南臺科大', 'description': '南臺科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 13716, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T20:15:15.838Z', 'updatedAt': '2020-08-13T08:43:51.142Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 
'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/57a5b081-c8e3-4950-aaee-24aca1bddedc/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/74e75973-374c-43ad-8f5d-b772a9376d6b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 220}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7bb92698-8095-43c0-9702-41fd32a892b8', 'alias': 'nkuht', 'name': '高雄餐旅大學', 'description': '高雄餐旅大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6858, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T21:15:15.840Z', 'updatedAt': '2020-08-13T08:52:50.944Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bf0ff419-8c1a-492f-9a5c-9ea53218b2a4/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2271d055-674f-49d8-b8d7-fa59214e75d8/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 161}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd1eeb00d-a3ab-410a-b495-7a0c3fd99fee', 'alias': 'cmu', 'name': '中國醫藥大學', 'description': '中國醫藥大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 
5971, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T22:15:15.841Z', 'updatedAt': '2020-08-13T08:57:10.818Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/71fad17f-3d5d-49b9-86d1-c640d9dc92a8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/19a6afcc-b3d4-4fb6-8125-8472e859f8b4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 140}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '97816f4d-615e-4e42-aed2-4c14cde57497', 'alias': 'cjcu', 'name': '長榮大學', 'description': '長榮大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6986, 'subscribed': False, 'read': False, 'createdAt': '2016-05-21T23:15:15.843Z', 'updatedAt': '2020-08-13T08:45:07.502Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/af7f9a0e-53f8-4a70-8ba0-0a11e0dc410e/orig.jpeg', 
'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6a9f7f6f-e379-4fd8-b319-3dddc50c43dc/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 220}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0db43903-3b92-4788-a2c9-57375bce62a4', 'alias': 'npust', 'name': '屏科大', 'description': '屏科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 10060, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T00:15:15.844Z', 'updatedAt': '2020-08-13T08:50:25.035Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e490e296-b4dc-4a6b-bae5-e615052fa7aa/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9309d439-5d0f-4c4b-a56f-9907fe0742e9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 426}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '07630702-0aae-4ef7-a526-eb9067d92d97', 'alias': 'mcut', 'name': '明志科大', 'description': '明志科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3578, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T01:15:15.845Z', 'updatedAt': '2020-08-13T08:52:37.941Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 
'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/23ced400-a0e0-4698-ba14-f43344dcf9a7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/724538b5-db7c-4cce-97a8-a82f7fa94677/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 49}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cba7e478-e7c5-4d62-8b30-eb303f380e2f', 'alias': 'asia', 'name': '亞洲大學', 'description': '亞洲大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 8862, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T02:15:15.847Z', 'updatedAt': '2020-08-13T08:54:25.719Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/84a70f9d-6b92-4ec9-9929-8fe7d45e91a0/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/76f3ecde-260a-4b80-9ecd-8e0f506523c1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 275}, 'favorite': 
False, 'enablePrivateMessage': False}, {'id': 'f2a75b4c-0666-40ed-8814-5a74412d9aa2', 'alias': 'uch', 'name': '健行科大', 'description': '健行科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4194, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T03:15:15.848Z', 'updatedAt': '2020-08-13T08:44:15.343Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/84c13435-b865-4299-bac9-eac69a3c6607/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/15f59884-1a79-47c2-b1d1-583c73a44461/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 52}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7a5177df-fa34-4add-b562-68ab93c6f9fa', 'alias': 'stu', 'name': '樹德科技大學', 'description': '樹德科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6175, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T04:15:15.849Z', 'updatedAt': '2020-08-13T08:43:46.107Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': 
['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6e9976fa-e649-423a-8b3a-033bd809a7a2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0c132aa1-46b0-449c-bd1c-db47dba22ec2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 248}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cc2b057e-dddf-4ec0-8b9c-d30ca59b76b2', 'alias': 'au', 'name': '真理大學', 'description': '真理大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3314, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T05:15:15.851Z', 'updatedAt': '2020-08-13T08:51:37.529Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d41f7957-4a73-4bdc-813d-10aa76b4c157/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/63a22903-617a-41aa-bbed-a528aa72962c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 129}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5d9ccb23-849b-40df-8136-89b81f30188d', 'alias': 'cnu', 'name': '嘉南藥理大學', 'description': '嘉南藥理大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 
11036, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T06:15:15.852Z', 'updatedAt': '2020-08-13T08:45:11.700Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ecda5e0d-6c12-4b29-bf13-4f195cc1ef9f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8f8641e4-b2ca-44cd-8e31-0eac849dda92/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 325}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'abbbf09a-dd93-4975-8925-f09cc0beaf40', 'alias': 'kmu', 'name': '高雄醫學大學', 'description': '高雄醫學大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 5593, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T07:15:15.854Z', 'updatedAt': '2020-08-13T08:46:00.400Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1905b588-1ed1-4c6e-9b35-4d1dcaf21910/orig.jpeg', 
'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9ef30b06-e936-4a28-8b93-d7d033c5919d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 100}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6997411c-63de-459b-be62-feb8116f4cde', 'alias': 'must', 'name': '明新科技大學', 'description': '明新科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6326, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T08:15:15.855Z', 'updatedAt': '2020-08-13T08:46:18.196Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8a5ac832-f32b-4152-ae0f-69b5845b0569/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/365bbd02-f689-4f77-b499-c81706a9257e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 62}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '947a7d9a-3c63-406c-9e30-a76054c17d9b', 'alias': 'nttu', 'name': '臺東大學', 'description': '臺東大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3481, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T09:15:15.856Z', 'updatedAt': '2020-08-13T08:56:07.037Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': 
True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d00c4500-79f9-4d89-99cf-3527186e3ab6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/847db55b-be84-4093-9e3a-dae59aaa2d5c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 89}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ed34f889-7f74-438d-9870-a29625c29b14', 'alias': 'tut', 'name': '台南應用科大', 'description': '台南應用科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 10989, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T10:15:15.858Z', 'updatedAt': '2020-08-13T08:56:55.494Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7ce82833-e404-4d20-9909-073b8a9cd18d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/71b8fde5-5af2-4eae-abe1-5514c7d84517/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 210}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': '28c28656-da2a-47b9-957b-d70e50213b0d', 'alias': 'nqu', 'name': '金門大學', 'description': '金門大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4091, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T11:15:15.859Z', 'updatedAt': '2021-07-13T07:12:18.883Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9c7566de-d34e-4758-8d53-dea5b27bf67e/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/de605780-75b6-4d99-bdf4-8415c8b0c03d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 166}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'a6c0248a-9dbb-47dc-b9a3-bdddf53825c3', 'alias': 'dyu', 'name': '大葉大學', 'description': '大葉大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4767, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T12:15:15.860Z', 'updatedAt': '2020-08-13T08:57:48.057Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': 
['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/afee03e9-e3ca-4fae-98ce-dde90d27ab12/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e0e52653-56f6-4473-8bd6-6f1a69b06ee2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 127}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'fb3d5960-017d-4799-ae60-feb7ea11b285', 'alias': 'nhu', 'name': '南華大學', 'description': '南華大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3766, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T13:15:15.862Z', 'updatedAt': '2020-08-13T08:55:57.987Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9a943de8-4377-4e2a-97d6-ce7ac2f3e693/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2b2c38ee-5e9d-41fd-99a5-352fb537f3b7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 91}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e6070620-592a-405c-9793-e9ff5b41795a', 'alias': 'ltu', 'name': '嶺東科大', 'description': '嶺東科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 
6857, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T14:15:15.863Z', 'updatedAt': '2021-07-13T07:12:15.435Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/77c0c4ab-e947-4ccd-92f0-6e99ea1d3b5f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b83744ac-47f6-4f2c-bf1f-d8c930f7e834/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 156}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '70e331cb-0fb7-45c6-8d8b-8a2f8b5c1e8c', 'alias': 'fy', 'name': '輔英科技大學', 'description': '輔英科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 8785, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T15:15:15.864Z', 'updatedAt': '2021-07-13T07:12:21.345Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fb6efb1a-58bb-4aec-8a90-b7df6be0ead0/orig.jpeg', 
'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/416c6b99-4da5-4e3a-b552-8740e538d76e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 203}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '8e389da1-c77d-423d-a8b3-2cb25be09bdf', 'alias': 'cute', 'name': '中國科大', 'description': '中國科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4671, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T16:15:15.865Z', 'updatedAt': '2020-08-13T08:57:44.513Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a3ddf2c2-15bd-4958-bb14-9a4b11cc9da2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8dc54e4b-87af-4e03-971f-28b814c9af6e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 94}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2c8845fe-b0ee-46da-a971-d26b23a7cc74', 'alias': 'tcust', 'name': '慈濟科技大學', 'description': '慈濟科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2907, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T17:15:15.867Z', 'updatedAt': '2020-08-13T08:59:53.552Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': 
True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/38387647-9771-411e-9413-c286cc8568b3/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/eec697a7-e936-4487-9948-4d5de7f96679/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 105}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9717f1fc-82f0-416c-90d4-329f822bf5fb', 'alias': 'tcu', 'name': '慈濟大學', 'description': '慈濟大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2745, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T18:15:15.868Z', 'updatedAt': '2020-08-13T08:47:15.760Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/221949e9-dc2a-4c65-bd44-780b0603f0b3/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e30af91d-4e5d-4e96-a150-75b9c4c794a1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 45}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': 'b95f455f-e866-4536-aa5d-f898c7ca795c', 'alias': 'chihlee', 'name': '致理科技大學', 'description': '致理科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7580, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T19:15:15.870Z', 'updatedAt': '2020-08-13T08:54:51.426Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f4b4d213-6e7c-41db-bb8d-5fd70d5f8d5f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2c191310-2a90-4a1b-86a8-31eac7855e58/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 192}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3220db90-5f1f-417a-bd21-f09dcf97d729', 'alias': 'knu', 'name': '開南大學', 'description': '開南大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2423, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T20:15:15.871Z', 'updatedAt': '2020-08-13T08:41:56.181Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 
'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c22022da-828f-46ab-a9a3-c5930b7b1aed/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/af757e42-b1bd-4df2-9dbc-99420cc5f4ed/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 44}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c619b612-64d8-49b1-a462-20801d73a9a4', 'alias': 'hk', 'name': '弘光科技大學', 'description': '弘光科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 10470, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T21:15:15.872Z', 'updatedAt': '2020-08-13T08:57:54.607Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2d1b355d-492f-4a66-8379-5346c52d1223/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/63b8f8e0-644d-4be0-87d3-1e0cccf6895d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 172}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ff9a3357-1b74-4b0e-96f7-07c3b056033f', 'alias': 'nutn', 'name': '臺南大學', 'description': '臺南大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 
'subscriptionCount': 4417, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T22:15:15.874Z', 'updatedAt': '2020-08-13T08:50:56.369Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3f348ec2-5534-4664-834a-63cf459314d5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1fca3188-7b09-439c-afd1-dcdd1e437c02/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 107}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5f9c5599-5093-4dc4-b077-6ba8f8a5d376', 'alias': 'chu', 'name': '中華大學', 'description': '中華大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2436, 'subscribed': False, 'read': False, 'createdAt': '2016-05-22T23:15:15.875Z', 'updatedAt': '2020-08-13T08:51:41.612Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/65d869cb-fd9d-496c-bd9c-b5d8de49c71c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/84a3ee8d-0208-4cf8-985d-9eb0efca433c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 54}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b922750e-adc4-46fc-aa7b-d594ac8d1e00', 'alias': 'takming', 'name': '德明財經科大', 'description': '德明財經科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4577, 'subscribed': False, 'read': False, 'createdAt': '2016-05-23T00:15:15.877Z', 'updatedAt': '2020-08-13T08:54:04.463Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c14ded69-e64a-468f-96fd-ec7d4bb9a344/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/aa942d02-bf11-4082-b0af-568e0854e839/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 105}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '715eeb3b-a981-4c1e-bf90-fede4604a9eb', 'alias': 'just', 'name': '景文科大', 'description': '景文科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3942, 'subscribed': False, 'read': False, 'createdAt': '2016-05-23T01:15:15.878Z', 'updatedAt': '2020-08-13T08:49:09.981Z', 'canPost': False, 'ignorePost': 
False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/26313c03-3765-4b63-85de-b454b67efa43/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ac237a61-35da-43ab-8dea-ae48cee3bf3a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 123}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c6e67058-e728-4a86-b228-b0e73cf9d985', 'alias': 'delete', 'name': '刪文', 'description': '', 'subscriptionCount': 56, 'subscribed': False, 'read': False, 'createdAt': '2016-05-23T02:15:15.879Z', 'updatedAt': '2017-06-18T20:42:53.510Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 0}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f022828e-790a-4ea4-8b15-92639fcdb804', 'alias': 'lhu', 'name': '龍華科技大學', 'description': '龍華科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6097, 'subscribed': False, 'read': False, 'createdAt': '2016-06-20T03:02:48.846Z', 'updatedAt': 
'2020-08-13T08:42:20.155Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4e4cd591-6753-4f12-a65a-b38f388710b2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/57ee8fcc-6003-4983-b7c8-77e96edfb47d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 112}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9c3a9c20-5f82-4bb9-ab7a-7bd994dfa0f5', 'alias': 'yuntech', 'name': '國立雲林科技大學', 'description': '國立雲林科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7901, 'subscribed': False, 'read': False, 'createdAt': '2016-06-21T18:50:14.053Z', 'updatedAt': '2020-08-13T08:47:34.311Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d0572a23-5a4f-4120-8dbb-dd7198ca70e2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/5aa6be25-ba3a-41c0-b25f-5544519f5bff/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 119}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '63a0d93e-acc5-4664-b55a-81e6fe0a4d88', 'alias': 'csu', 'name': '正修科大', 'description': '正修科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7064, 'subscribed': False, 'read': False, 'createdAt': '2016-06-29T02:36:10.704Z', 'updatedAt': '2020-08-13T08:41:33.953Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/26ca693f-e13f-4342-9dc4-ff0de3dd32d5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0cc0d5ae-535f-4e65-bddb-aa4aed767f05/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 84}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ceb61936-45f8-4282-960a-938fcb4d760d', 'alias': 'feu', 'name': '遠東科大', 'description': '遠東科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2284, 'subscribed': False, 'read': False, 'createdAt': '2016-07-05T17:34:13.377Z', 'updatedAt': '2020-08-13T08:51:55.139Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 
'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cbeb5464-06e1-4780-a092-cef7c4891f9c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5dc20b81-1ccb-4b73-b787-58876eddca28/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 10}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b88dec33-5dd7-41c2-b50e-92f9fcaa6342', 'alias': 'wfu', 'name': '吳鳳科大', 'description': '吳鳳科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1736, 'subscribed': False, 'read': False, 'createdAt': '2016-07-05T17:39:14.613Z', 'updatedAt': '2020-08-13T09:00:19.442Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5cb247b5-63e6-466a-ad23-d972e8402f54/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/351f3e46-a4a5-48ff-aa52-94c0824a0b70/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'753d7474-2de4-47a7-bcf6-b047b4301b90', 'alias': 'ctust', 'name': '中臺科大', 'description': '中臺科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6274, 'subscribed': False, 'read': False, 'createdAt': '2016-07-05T17:46:58.770Z', 'updatedAt': '2020-08-13T08:48:42.478Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5aa65510-041e-4608-826f-7b7f1edd15d4/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4e92d76a-068d-440f-99d6-c904e48f91f6/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 114}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '35556304-f4ce-428a-bb3f-1dae26d4cc00', 'alias': 'npu', 'name': '國立澎湖科技大學', 'description': '國立澎湖科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3582, 'subscribed': False, 'read': False, 'createdAt': '2016-07-05T17:51:08.283Z', 'updatedAt': '2020-08-13T08:52:55.058Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 
'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a15d56d9-7ce5-4a0e-98b5-ffd6f4d49b68/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b1dcf856-83fc-4544-9bd6-bc2e5df3b451/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 45}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2f45090c-7dc0-4373-87d4-722067905cd5', 'alias': 'ksu', 'name': '崑山科大', 'description': '崑山科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7356, 'subscribed': False, 'read': False, 'createdAt': '2016-07-05T17:58:22.279Z', 'updatedAt': '2020-08-13T08:46:10.452Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c8025006-9874-4eb9-91fd-8923e4fdb57b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fae2f7eb-a285-49c9-8c70-1d06266159b9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 131}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'da7eccfc-5025-46a2-8558-9a716df087cc', 'alias': 'ocu', 'name': '僑光科技大學', 'description': '僑光科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 5655, 'subscribed': False, 'read': 
False, 'createdAt': '2016-07-15T04:06:37.278Z', 'updatedAt': '2020-08-13T08:43:12.416Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0f2a664c-7df4-4544-89b3-16272ccc3e84/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4883eebe-7170-47e4-9a79-7e49cbf007b5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 100}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2df6b29d-a531-464e-a45f-568b2586b7e5', 'alias': 'hwai', 'name': '中華醫事科技大學', 'description': '中華醫事科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 7043, 'subscribed': False, 'read': False, 'createdAt': '2016-07-15T04:06:37.278Z', 'updatedAt': '2020-08-13T08:41:52.401Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cf85fd20-79f5-43ce-a65e-792403328377/orig.jpeg', 'type': 'image/jpeg', 'width': 
1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/20929e6f-ed55-463e-b382-5676e75b5e31/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 319}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '428e6c45-891b-4f29-8075-4b63c7bbd3b2', 'alias': 'hwu', 'name': '醒吾科技大學', 'description': '醒吾科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 5039, 'subscribed': False, 'read': False, 'createdAt': '2016-07-15T04:06:37.278Z', 'updatedAt': '2020-08-13T08:49:07.007Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/16a4c7cb-b9d5-4d50-9ad6-b33cbf179659/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/28dfbe9d-43a0-42af-858e-012dfb6a58bb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 171}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a44320a7-69c3-4e68-a22f-043dc37c1e89', 'alias': 'twu', 'name': '環球科技大學', 'description': '環球科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1277, 'subscribed': False, 'read': False, 'createdAt': '2016-07-15T04:06:37.278Z', 'updatedAt': '2020-08-13T08:47:30.027Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 
'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/418be4d2-c20e-446a-bfdd-001292ed8d07/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d7a89759-c2ba-45d3-85f2-ae25b6a536c2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 4}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c730a6a0-6220-4f32-a6d3-43c7eb6b6cce', 'alias': 'ntupes', 'name': '臺灣體育運動大學', 'description': '臺灣體育運動大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2962, 'subscribed': False, 'read': False, 'createdAt': '2016-07-15T04:06:37.278Z', 'updatedAt': '2020-08-13T08:46:37.706Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/786b60c9-f16c-443e-b7d3-2daad31b4940/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/08101040-7a91-4a00-9d17-0b4bfaed91af/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 47}, 'favorite': False, 
'enablePrivateMessage': False}, {'id': '44eb2033-5318-4d84-bf70-f776372f1fc6', 'alias': 'hfu', 'name': '華梵大學', 'description': '華梵大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1022, 'subscribed': False, 'read': False, 'createdAt': '2016-07-21T18:40:38.585Z', 'updatedAt': '2020-08-13T08:41:48.807Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/20a915b2-66a9-4fe7-9855-e8d5c7cc3e76/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3d79c0c0-abf4-49b6-893e-69529a0a3492/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd5617656-cc6f-49fa-825a-1c8f39801fef', 'alias': 'fgu', 'name': '佛光大學', 'description': '佛光大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2206, 'subscribed': False, 'read': False, 'createdAt': '2016-07-21T18:43:04.801Z', 'updatedAt': '2020-08-13T08:57:51.441Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': 
[], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/90399ce5-7dd9-46b4-adfb-cb24ecd26a6d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/27ddd192-15e8-4751-86f2-8c5144592f20/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 39}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6fef528b-68d8-4ea3-ba51-429b0f95f3c6', 'alias': 'vnu', 'name': '萬能科大', 'description': '萬能科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4032, 'subscribed': False, 'read': False, 'createdAt': '2016-07-21T18:49:41.232Z', 'updatedAt': '2020-08-13T09:00:15.253Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5971d1f4-c6b8-4805-89e4-bd942920e15e/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e931ccbf-ef91-4599-bc16-c3a5b29c5bdd/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 71}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '51d1d615-0013-45fa-9968-641a11823411', 'alias': 'oit', 'name': '亞東技術學院', 'description': '亞東技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2578, 'subscribed': 
False, 'read': False, 'createdAt': '2016-07-21T18:52:51.983Z', 'updatedAt': '2020-08-13T08:59:43.094Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cb6074e2-a651-43f3-8f42-2997767ce8d6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a081276d-36d4-4038-885e-1eea4dad5c42/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 31}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cf19e445-c0ab-4c2a-b5c7-75ab53cde89a', 'alias': 'cku', 'name': '經國管理暨健康學院', 'description': '經國管理暨健康學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2005, 'subscribed': False, 'read': False, 'createdAt': '2016-07-21T18:55:31.107Z', 'updatedAt': '2020-08-13T08:47:47.734Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cd4dda3a-8689-4697-bb3e-a2437cebb20a/orig.jpeg', 'type': 
'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bc831b40-1cae-40ab-b524-28fc96b80c56/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 39}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '76408068-e8d9-4265-a958-256ed5e6690c', 'alias': 'ctu', 'name': '建國科大', 'description': '建國科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2658, 'subscribed': False, 'read': False, 'createdAt': '2016-07-21T19:00:37.223Z', 'updatedAt': '2020-08-13T08:57:37.680Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/27ed5ecc-dce0-4f2e-a02c-4402f726427e/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/99763cb6-e211-4c51-aeb5-4375a295b8ef/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a6a12a63-dca5-4e2f-907e-ac45aaa30586', 'alias': 'tnu', 'name': '東南科大', 'description': '東南科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1907, 'subscribed': False, 'read': False, 'createdAt': '2016-07-28T19:01:43.033Z', 'updatedAt': '2020-08-13T08:56:39.469Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 
'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d943016c-8b64-4633-ae6c-e54b7522ea89/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/27a6f9cb-b42f-48a1-b266-71560367d2cc/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 17}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b6720fca-d4da-4931-b7fa-50975c7666d5', 'alias': 'nkut', 'name': '南開科大', 'description': '南開科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1915, 'subscribed': False, 'read': False, 'createdAt': '2016-07-28T19:02:27.417Z', 'updatedAt': '2020-08-13T08:43:04.516Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0fda44d5-ebf4-440e-8f16-237c44c84fe5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/150ff22d-d6e4-4a1d-bfba-5840c182f4cb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 6}, 'favorite': 
False, 'enablePrivateMessage': False}, {'id': '7298d0a7-e8cb-4957-9d28-ee43c679ce81', 'alias': 'sju', 'name': '聖約翰科大', 'description': '聖約翰科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1638, 'subscribed': False, 'read': False, 'createdAt': '2016-07-28T19:06:33.725Z', 'updatedAt': '2020-08-13T08:43:35.203Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e5411ce5-ef18-4ce9-844f-da6ea51f44f3/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/55c36f53-d939-4f2d-a7dd-078e89b46a53/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 10}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '414c6dc3-98ef-4b8f-b006-1ed5a91dc090', 'alias': 'mdu', 'name': '明道大學', 'description': '明道大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1583, 'subscribed': False, 'read': False, 'createdAt': '2016-07-28T19:08:30.463Z', 'updatedAt': '2020-08-13T08:42:26.827Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 
'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1458e027-6d52-47b5-8c08-059e4d6387e2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fe198831-96f9-4bd5-b7df-b292a2a35704/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 10}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '01978bc7-fd88-4082-9bbe-a1464df46020', 'alias': 'meiho', 'name': '美和科大', 'description': '美和科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3328, 'subscribed': False, 'read': False, 'createdAt': '2016-07-28T19:11:43.701Z', 'updatedAt': '2020-08-13T08:55:51.173Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ab45f6c5-b463-4af2-9140-a4c7dcc7ca1a/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fed07efd-37b2-4852-9eb8-68d8bab8d2d6/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 61}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f27ddf8a-6961-4818-93ec-6e20f57a880d', 'alias': 'tnnua', 'name': '臺南藝術大學', 'description': '臺南藝術大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1662, 
'subscribed': False, 'read': False, 'createdAt': '2016-07-28T19:15:29.138Z', 'updatedAt': '2020-08-13T08:47:21.106Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/117bbc58-0267-4d85-b2e5-e29173d4fcc8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9439b08b-8242-4940-9d23-e8340463ff3e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 49}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e589fcb2-1172-4786-9ff0-0bc005862f1e', 'alias': 'ukn', 'name': '康寧大學', 'description': '康寧大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2102, 'subscribed': False, 'read': False, 'createdAt': '2016-07-28T19:17:49.216Z', 'updatedAt': '2020-08-13T09:00:08.024Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0117702d-61ea-478b-862c-61068b3bef06/orig.jpeg', 'type': 
'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7b42b3fa-dc02-46c0-834a-f0ebf7eb82df/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 42}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b7be1117-ef38-413c-8d38-8189f1d4823a', 'alias': 'ydu', 'name': '育達科大', 'description': '育達科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1815, 'subscribed': False, 'read': False, 'createdAt': '2016-08-04T18:01:14.368Z', 'updatedAt': '2020-08-13T09:00:23.123Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1b76bcac-cf5f-4fb8-9bf7-8db61d364bf7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bf2f26ea-589d-4846-ab69-cd21e5f74421/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 15}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f4dd0cc1-b67f-401b-a70b-82077e6fee02', 'alias': 'kyu', 'name': '高苑科大', 'description': '高苑科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1441, 'subscribed': False, 'read': False, 'createdAt': '2016-08-04T18:04:49.716Z', 'updatedAt': '2020-08-13T08:46:14.508Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 
'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d6dc2ea7-0a3d-4a73-8a0a-c653fa141866/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3844ed46-e8ed-48fa-b914-f057d80b1ca6/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 5}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8e7402e4-4535-4f84-bbc8-da3c568f9821', 'alias': 'cust', 'name': '中華科大', 'description': '中華科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2127, 'subscribed': False, 'read': False, 'createdAt': '2016-08-04T18:11:04.074Z', 'updatedAt': '2020-08-13T08:48:45.970Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0b4baf16-93f1-4d3f-abd5-ed5132e28fc5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ec8ca862-f953-4a5a-82b6-fa0148a2cd6e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 15}, 'favorite': 
False, 'enablePrivateMessage': False}, {'id': 'fc5720c6-4ac5-4a85-b611-b999a0a6eade', 'alias': 'ypu', 'name': '元培醫事科大', 'description': '元培醫事科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3286, 'subscribed': False, 'read': False, 'createdAt': '2016-08-04T18:16:42.779Z', 'updatedAt': '2020-08-13T08:54:22.190Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/80c6d53e-b426-40ab-b018-9d7fc6b1dc77/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b94e1278-4628-48a3-b249-446fe6e4d538/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 32}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3a392fdd-eb74-4906-91b9-7595536f8e94', 'alias': 'ntsu', 'name': '國立體育大學', 'description': '國立體育大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2312, 'subscribed': False, 'read': False, 'createdAt': '2016-08-04T18:19:23.896Z', 'updatedAt': '2020-08-13T08:56:04.370Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': 
['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c94bce8a-9a17-411c-9ef8-4d58ebf1fd63/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fa0021a5-1abb-4064-8acf-6244e3d1d564/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '89fa9865-f508-4271-9b84-a98eb0a65fda', 'alias': 'tajen', 'name': '大仁科大', 'description': '大仁科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 3160, 'subscribed': False, 'read': False, 'createdAt': '2016-08-04T18:23:54.406Z', 'updatedAt': '2020-08-13T08:47:11.495Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/304a2e8f-8be8-4354-967e-f1a86cdd2bd7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c75edd70-5e0c-411e-b11b-e9ebbd940d0f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 121}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b614ccbd-debe-4471-889b-9e9ef656fe77', 'alias': 'tsu', 'name': '台灣首府大學', 'description': '台灣首府大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 
'subscriptionCount': 951, 'subscribed': False, 'read': False, 'createdAt': '2016-08-11T17:57:41.668Z', 'updatedAt': '2020-08-13T08:44:11.284Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e35bbd0c-ad63-452b-97d7-c533d823b20a/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a83fafc6-57a4-43ba-8733-8e52609da57b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b70d3c78-8118-495b-b812-4179dd22efae', 'alias': 'hcu', 'name': '玄奘大學', 'description': '玄奘大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2288, 'subscribed': False, 'read': False, 'createdAt': '2016-08-11T17:59:39.117Z', 'updatedAt': '2020-08-13T08:55:17.086Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/0b1d009a-cf8a-428a-9d6b-cd8f32b1c482/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/cdf83d6d-a197-41eb-b3f3-159f2450e5fc/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 37}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9b2e05ce-6264-417b-8dbf-2f233a9ea625', 'alias': 'nju', 'name': '南榮科大', 'description': '南榮科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 308, 'subscribed': False, 'read': False, 'createdAt': '2016-08-11T18:02:32.486Z', 'updatedAt': '2020-08-13T08:59:23.788Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/eca6fe85-ef11-4ee8-81f3-58f3f7efe02f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f3e64c6c-f257-4648-bd48-669f6ce5c3ea/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1fd91af8-3acb-4732-b615-718f8c964585', 'alias': 'ccut', 'name': '中州科大', 'description': '中州科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 889, 'subscribed': False, 'read': False, 'createdAt': '2016-08-11T18:05:24.013Z', 'updatedAt': '2020-08-13T08:47:38.799Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/910793fa-5746-4350-bde2-4723d75ee959/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ba24030b-fb80-4cda-bc10-72355ed014dd/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'af8749f0-8542-4256-8fbb-ce5f025cb254', 'alias': 'hwh', 'name': '華夏科大', 'description': '華夏科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1022, 'subscribed': False, 'read': False, 'createdAt': '2016-08-11T18:11:28.240Z', 'updatedAt': '2020-08-13T08:49:04.409Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/304b66a6-6283-4060-b743-9b0b5c3a196d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bf10b40c-7d20-4437-9153-9ffd0bf683b5/orig.jpeg', 'type': 
'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a5903cdd-9edf-4cbe-a377-2acf524bf0ca', 'alias': 'hust', 'name': '修平科大', 'description': '修平科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2096, 'subscribed': False, 'read': False, 'createdAt': '2016-08-11T18:20:03.638Z', 'updatedAt': '2020-08-13T08:58:03.309Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ae255baa-b943-41b7-8b7c-bd06441c8797/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3853171a-75be-4ff8-85b7-742b3a97250f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd45aed33-9bf4-409c-affb-dc88805413a9', 'alias': 'tust', 'name': '敏實科技大學', 'description': '敏實科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 490, 'subscribed': False, 'read': False, 'createdAt': '2016-08-11T18:27:45.354Z', 'updatedAt': '2020-10-26T06:41:33.846Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 
'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8b0dd34f-c959-416c-b601-435afa32bdc0/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/67bf43c1-d649-40df-b59c-f9f217931f4d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c5d2c311-2e63-462c-bb0d-193d62516cbf', 'alias': 'tpcu', 'name': '臺北城市科大', 'description': '臺北城市科大板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4159, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:22:48.862Z', 'updatedAt': '2020-08-13T08:56:52.153Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c7f1ff58-a4b8-4db8-a4ab-38155c9649dd/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/21c73eaf-30da-4a86-9e63-59c506a29471/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 36}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '74412b2a-383d-47d7-b1f3-264e91d4ea40', 'alias': 'mmc', 'name': '馬偕醫學院', 'description': 
'馬偕醫學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1239, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:27:41.262Z', 'updatedAt': '2020-08-13T08:55:54.553Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0962800d-fff1-4f81-b0c9-c09aa0135552/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9c9aacfb-e06f-4219-8a2f-4a4351628ca2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a38f96da-bbbb-4305-9225-a0acf92012be', 'alias': 'dlit', 'name': '宏國德霖科技大學', 'description': '宏國德霖科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1318, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:35:01.313Z', 'updatedAt': '2020-08-13T08:48:52.192Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 
'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/eada2536-ea75-45e3-82ae-0e1299e39dc4/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/431accd1-d73f-4042-8949-febdef559787/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 8}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9670c4b7-da6d-488f-a9e1-f3ad5f56406e', 'alias': 'tumt', 'name': '台北海洋科技大學', 'description': '台北海洋科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1477, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:38:39.911Z', 'updatedAt': '2020-08-13T08:54:18.805Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e1d29a42-b6b4-427a-8e8b-0df4038e5750/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5486962e-7a14-4654-b951-530fe41ea4cb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 20}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2cc2630e-0fe4-4c61-8880-19017c453975', 'alias': 'nanya', 'name': '南亞技術學院', 'description': '南亞技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 911, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:44:16.853Z', 'updatedAt': '2020-08-13T08:42:30.808Z', 
'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/224f6996-f1e4-4386-aa46-14f3e5709ace/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4bf12dfc-e015-4580-a783-53577a77a8c7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'fe5a4146-ff03-4d2e-a9f8-04283c772d19', 'alias': 'tcpa', 'name': '臺灣戲曲學院', 'description': '臺灣戲曲學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 561, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:47:58.852Z', 'updatedAt': '2020-08-13T08:59:50.163Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/dd123b52-47fb-4a95-ad7f-4bc0080249ac/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/7283f0e1-ebb8-4ec3-aebc-c3c85e0b970d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2c08da52-1bb0-43d6-a58d-a4a8c322ac2f', 'alias': 'lit', 'name': '黎明技術學院', 'description': '黎明技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1742, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:50:23.056Z', 'updatedAt': '2020-08-13T08:49:41.216Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9cbf6117-f5e8-4f20-b387-03c0c1d2c7bb/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b2b193f2-a55b-46a2-8aac-4f7157aced82/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd4b520d9-6e6a-415f-923d-464334c9246e', 'alias': 'toko', 'name': '稻江科技暨管理學院', 'description': '稻江科技暨管理學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 283, 'subscribed': False, 'read': False, 'createdAt': '2016-08-18T18:53:54.898Z', 'updatedAt': '2020-08-13T08:54:11.410Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': 
False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ead9ecb1-b0e3-4cf9-a500-d3610b8a690e/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/84086bc8-0e31-4b1e-8a43-ad3bee17c70b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'aff553d9-d18e-43af-ae9e-71ad09366ed6', 'alias': 'dila', 'name': '法鼓文理學院', 'description': '法鼓文理學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 245, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2021-04-20T08:36:45.105Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': ['ok'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e522fc90-79f3-410d-9aa6-6e18db1fa42b/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1efc303f-23c2-44b0-883c-d4d6423c146f/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'0bb81a8f-118e-4950-ac9f-c9570d05680f', 'alias': 'apic', 'name': '亞太創意技術學院', 'description': '亞太創意技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 260, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:41:21.041Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f0df0b22-3e07-438d-84dd-06368e975824/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/eabc833e-af81-4558-822e-3a986f5f81e1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '71adc2db-e939-41b5-b194-1c4499e11a62', 'alias': 'fit', 'name': '蘭陽技術學院', 'description': '蘭陽技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 383, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:55:04.279Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 
'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/813acf5f-374e-4cbf-bb77-9704a13f642c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/73c5d757-1890-491f-ab06-17ec8b4a75dc/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8a71f083-b5be-44ea-88ae-025265c4281a', 'alias': 'ttc', 'name': '大同技術學院', 'description': '大同技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 442, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:54:15.506Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b6a5fdd6-fc20-4456-a928-53c254331fbf/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5f81afc0-8e2f-43d9-b105-036568f691f4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd430504b-4938-4e37-92ec-62aebf63630a', 'alias': 'tht', 'name': '臺灣觀光學院', 'description': '臺灣觀光學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 370, 'subscribed': False, 'read': 
False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:51:07.419Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cd8da9e9-2d30-4f69-96c1-313adb0f9d49/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/415eacbf-d515-4d2f-8a6b-32c815f24774/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '95aef8c8-b106-45bc-8449-f3999e097538', 'alias': 'dahan', 'name': '大漢技術學院', 'description': '大漢技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 300, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:45:28.869Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5c4197bd-d1de-4204-a8e8-a9bfc4cbf27d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 
'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2f27e967-5225-41d9-b48b-6798ad860e5c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0d0140ad-53d8-46f8-861a-c751b5307be7', 'alias': 'tf', 'name': '東方設計大學', 'description': '東方設計大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1067, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:44:01.025Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/81b10072-2b25-423f-b942-e9bc3a24e62b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/85fd8f9d-0261-4d5a-b3d7-909b9c514154/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1e80e53b-f4e1-4959-8efe-4ba3dcbac130', 'alias': 'fotech', 'name': '和春技術學院', 'description': '和春技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 351, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:55:10.716Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 
'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b738616a-e6a2-4172-93be-a0a4dfd54f88/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ffa1ac64-bd32-408a-8256-6e1335be7c59/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '373e1153-735e-451d-ab03-749f200e236e', 'alias': 'cit', 'name': '崇右影藝科技大學', 'description': '崇右影藝科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 945, 'subscribed': False, 'read': False, 'createdAt': '2016-08-25T18:29:03.231Z', 'updatedAt': '2020-08-13T08:51:45.709Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/53b97eb1-9b31-4e49-a229-91776d84ffa8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d33812b9-8814-49f5-a1f4-913d29802161/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 4}, 'favorite': False, 
'enablePrivateMessage': False}, {'id': '9f519a33-80a0-4cec-a309-15d03959d27a', 'alias': 'szmc', 'name': '樹人醫護管理專科學校', 'description': '樹人醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 6600, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:43:56.892Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/13e4b36c-7a40-42b7-9ed9-c2d9cbf0cf28/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0be96bba-280e-4265-a686-211e339c92f8/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 258}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'afa86dc6-f8cb-4c11-998e-73948c9cb090', 'alias': 'hsc', 'name': '新生醫護管理專科學校', 'description': '新生醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 4883, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:45:33.072Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 
'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/15469ca6-1943-46b1-93d8-ff40fc0d2ec5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2be4cae9-50ca-4cfb-b300-244b0ff89e3d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 87}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4438c34e-24dc-4bd0-b210-d9467f3e88eb', 'alias': 'cjc', 'name': '崇仁醫護管理專科學校', 'description': '崇仁醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2618, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:45:00.084Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/944dc0dc-4640-4ee3-afcb-4cbcda871301/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/77351ac6-68e9-4546-bf29-ec61c12b3ac9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 28}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '56cdd28c-7b03-4737-a357-9103114e881d', 'alias': 'kmvs', 'name': '高美醫護管理專科學校', 'description': 
'高美醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 275, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:49:35.108Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d09ac658-bbda-4bdc-b51b-2a123391f286/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3f0bb475-6475-4398-ac74-0ecfd9296934/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '45636e16-748b-4aa2-8b71-4e8c2bad0378', 'alias': 'knjc', 'name': '康寧醫護暨管理專科學校', 'description': '康寧醫護暨管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 470, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:55:44.779Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': 
['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0f1db3a0-33ea-47fb-af12-90b80e89dcc4/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/cc8342b4-452e-400d-9d28-2fa373c29fc9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd1377288-8432-4ec0-a539-e1ac6d59362c', 'alias': 'mkc', 'name': '馬偕醫護管理專科學校', 'description': '馬偕醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 0, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:58:10.340Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/74cfcc99-4678-4f89-b8ab-7a7fc7447e1c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4142f1ad-5231-48bb-85b3-8a9f978500f9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 160}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b14ae5c6-f33b-4d3a-acbf-cc1fa0bcb8d0', 'alias': 'yuhing', 'name': '育英醫護管理專科學校', 'description': '育英醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1662, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': 
'2020-08-13T08:44:26.166Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4a54a5b2-1003-47a3-868c-6d2400439d4a/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fa796f33-f9dc-491c-b355-5b3b1cd45656/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4e2c3b1d-93cb-4a67-b21e-1488ccca2ede', 'alias': 'ntin', 'name': '國立臺南護理專科學校', 'description': '國立臺南護理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1775, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:46:33.500Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f7c26467-d543-45c1-8edc-ffd2fbc511d8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/a2b638a3-e65d-4559-b8b4-d2619dfdd666/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 6}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b389ba26-737e-4a5f-b274-e927f85c5ad4', 'alias': 'tzuhui', 'name': '慈惠醫護管理專科學校', 'description': '慈惠醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 1486, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:56:59.899Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/05fd2611-5bf3-46a7-afcd-5fbeafca2220/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f09dad72-c7b5-4dd6-9285-bf24ba41e02c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 18}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '59fe875d-1568-4274-9ef1-2dbd2c4e5376', 'alias': 'smc', 'name': '聖母醫護管理專科學校', 'description': '聖母醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 973, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:51:01.500Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 
'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5b8cc5d0-bd7d-460e-b441-784fa13eaee2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f52dc6f0-c327-4138-bef4-e0867b5c2659/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1ec14be6-abde-48dc-9646-b6ed1dca2225', 'alias': 'ctcn', 'name': '耕莘健康管理專科學校', 'description': '耕莘健康管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2513, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:45:20.070Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9e2081a6-0138-488e-a6fe-b88a2bc64b64/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/437087a6-14c0-450a-8fce-3942485334c2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 81}, 'favorite': False, 
'enablePrivateMessage': False}, {'id': '0186d21b-35cc-48cc-af2e-5e667a24b85d', 'alias': 'jente', 'name': '仁德醫護管理專科學校', 'description': '仁德醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2714, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:45:37.776Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/64d9ac2e-088f-4f34-9c0c-2f85ec8d1a0d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/653249fa-d3f4-4acd-9537-1c7bf146f1e1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 245}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '420f48da-bcad-43dc-aa93-3ba6cf5a4710', 'alias': 'mhchcm', 'name': '敏惠醫護管理專科學校', 'description': '敏惠醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2619, 'subscribed': False, 'read': False, 'createdAt': '2016-09-23T09:35:46.370Z', 'updatedAt': '2020-08-13T08:49:44.189Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 
'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6e5324f3-ff3f-4d39-8bee-855e99706cec/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5feb702e-1d91-4944-ba8c-3f639a24f290/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 59}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '50f871b1-627d-43c2-b15d-d5bcfc62df6a', 'alias': 'ndmctsgh', 'name': '國防醫學院', 'description': '國防醫學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2855, 'subscribed': False, 'read': False, 'createdAt': '2016-11-21T02:59:11.853Z', 'updatedAt': '2020-08-13T08:49:56.537Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a1f44ae2-8c9a-41de-b9cc-1e855a30a3c8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/031aab6d-7c33-4927-8b73-da9b24446870/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 97}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ced2dd8b-4ee7-46b5-ba6c-17aa2a19bdfd', 'alias': 'infotest', 'name': '公告測試區', 'description': '', 'subscriptionCount': 1, 'subscribed': False, 'read': False, 'createdAt': 
'2017-02-25T06:52:03.772Z', 'updatedAt': '2018-02-06T17:18:30.699Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2ae7b36e-30df-415f-a8bd-ef345e4b14f0', 'alias': 'info', 'name': '公告', 'description': '', 'subscriptionCount': 23, 'subscribed': False, 'read': False, 'createdAt': '2017-02-25T06:52:03.772Z', 'updatedAt': '2017-02-25T06:52:03.772Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 0}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd99edc69-f42e-4ed0-81fa-7a0747e2c332', 'alias': 'aaroc', 'name': '中華民國陸軍專科學校', 'description': '中華民國陸軍官校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2436, 'subscribed': False, 'read': False, 'createdAt': '2017-04-10T03:17:59.697Z', 'updatedAt': '2020-08-13T08:57:03.355Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 
'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7dc4bcdd-b98d-4ef0-ae99-2cc4626ece42/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5747a51b-896e-45f7-a70f-644675064527/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e50e4b41-e140-4dd3-b756-d0513bbca8d8', 'alias': 'tpa', 'name': '臺灣警察專科學校', 'description': '臺灣警察專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 10306, 'subscribed': False, 'read': False, 'createdAt': '2017-05-16T06:20:22.297Z', 'updatedAt': '2020-08-13T08:56:49.050Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['課程評價'], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4ccfc9a2-e8e9-4762-8025-784fc738d8e3/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/41eeaecd-9ea2-48ad-9ad3-12c47dd590e2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 133}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9e2ea80f-3342-4533-bb7a-d639e6c517e7', 
'alias': 'athlete', 'name': '選手', 'description': '', 'subscriptionCount': 0, 'subscribed': False, 'read': False, 'createdAt': '2017-08-22T05:22:03.772Z', 'updatedAt': '2020-08-13T06:02:20.625Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c0aead55-dc9c-4eee-bb05-89ff8b78eb60', 'alias': 'welcomegift', 'name': '小禮物', 'description': '', 'subscriptionCount': 292, 'subscribed': False, 'read': False, 'createdAt': '2017-12-13T09:55:46.177Z', 'updatedAt': '2017-12-13T09:55:46.177Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 29}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7b26d7a9-1654-4a55-9b34-11a43f25f4a6', 'alias': 'nkust', 'name': '國立高雄科技大學', 'description': '國立高雄科技大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 14088, 'subscribed': False, 'read': False, 'createdAt': '2018-03-26T10:02:39.841Z', 'updatedAt': '2020-08-13T08:50:06.583Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 
'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ed6e5aaf-670d-43a2-a055-73a3e2badf46/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9f320631-2749-4241-8fb8-4a1a41b0bb55/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 147}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5376c0b6-172c-46a1-a56a-46fa1caffcbf', 'alias': 'ultrasex', 'name': '進階西斯版', 'description': '', 'subscriptionCount': 145, 'subscribed': False, 'read': False, 'createdAt': '2018-03-30T06:52:31.953Z', 'updatedAt': '2020-02-27T07:06:11.065Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 0}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'eac3108e-7119-48bc-a501-9ef64c65eaca', 'alias': 'ctbc', 'name': '中信金融管理學院', 'description': '中信金融管理學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 516, 'subscribed': False, 'read': False, 'createdAt': '2018-06-28T07:34:54.275Z', 'updatedAt': 
'2020-08-13T08:45:15.702Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f19a66b1-ccdf-49ce-9b09-6634cbd2ef64/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f03a05fd-d5cb-4b20-b425-33af3c06ec59/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a5811a74-5dd2-4226-9f0b-9ce7826c5d1f', 'alias': 'tku_secondhand', 'name': '淡江大學二手物交流', 'description': '你的廢物是我的寶物!快來二手物交流板向同校卡友出清與徵求二手物吧!', 'subscriptionCount': 168, 'subscribed': False, 'read': False, 'createdAt': '2018-07-09T03:42:43.499Z', 'updatedAt': '2020-08-13T08:59:57.777Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': False, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'postCount': {'last30Days': 0}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6a384d83-7f27-48a3-8900-8b37312d4b30', 'alias': 'ntc', 'name': '國立臺東專科學校', 'description': 
'臺東專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 955, 'subscribed': False, 'read': False, 'createdAt': '2018-07-09T04:31:45.489Z', 'updatedAt': '2020-08-13T08:53:00.049Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/17171088-3e89-44ae-a575-1e89653be297/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bff20a19-9795-47b4-8b7a-dc19c2a99205/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '867cea50-59af-431d-b1f5-eae4e5daac93', 'alias': 'ncku_secondhand', 'name': '成功大學二手物交流', 'description': '你的廢物是我的寶物!快來二手物交流板向同校卡友出清與徵求二手物吧!', 'subscriptionCount': 200, 'subscribed': False, 'read': False, 'createdAt': '2018-07-10T09:16:36.328Z', 'updatedAt': '2020-08-13T08:49:50.438Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': False, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'postCount': {'last30Days': 0}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': '79b4c4ce-ec42-4caf-94aa-ed358744ed20', 'alias': 'fcu_secondhand', 'name': '逢甲大學二手物交流', 'description': '你的廢物是我的寶物!快來二手物交流板向同校卡友出清與徵求二手物吧!', 'subscriptionCount': 321, 'subscribed': False, 'read': False, 'createdAt': '2018-07-10T09:17:37.232Z', 'updatedAt': '2020-08-13T08:48:55.082Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': False, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b32dfecc-5379-487b-9fa1-f6168ca22e26', 'alias': '87', 'name': '87', 'description': '期間限定87板!歡迎發表任何自己或朋友的87事!分享你在生活中發現的87~一起開心過87節', 'subscriptionCount': 157, 'subscribed': False, 'read': False, 'createdAt': '2018-08-06T06:51:15.844Z', 'updatedAt': '2021-07-12T04:13:24.045Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['87節', '我八七我驕傲', '87'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9ffbd450-4416-4023-87ef-81b38703a224', 'alias': 'hkmacdaily', 'name': '港澳日常', 'description': '專屬於香港澳門o既討論區,日常生活大小事都可以係度傾~發文請注意需超過15個中文字', 'subscriptionCount': 0, 'subscribed': False, 
'read': False, 'createdAt': '2018-10-03T03:41:18.556Z', 'updatedAt': '2021-07-16T07:05:15.714Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['好玩', '港澳板', '生活', '日常'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fcc42323-331f-46d6-9ed4-554c4eba8ed7/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/eddfcec1-3081-42d6-8373-a8722c918cc6/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 501}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '02038822-2e52-4c3e-ba02-e3f216ce2c7e', 'alias': 'cna', 'name': '中華民國海軍軍官學校', 'description': '中華民國海軍軍官學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 790, 'subscribed': False, 'read': False, 'createdAt': '2018-11-05T09:59:16.508Z', 'updatedAt': '2020-08-13T08:57:14.844Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/63c6c201-7e0f-4dcf-a36f-2332cd57472c/orig.jpeg', 'type': 
'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a2ecc579-1c26-4b51-b3d4-22c69ff5ca90/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 4}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0e571b3a-aca2-4423-8136-3245adfbedbb', 'alias': 'hkmaculife', 'name': '港澳 u life', 'description': '專屬於香港澳門討論校園大小事o既討論區~無論係宿舍生活定係讀書話題都可以係度討論~發文請注意需超過15個中文字', 'subscriptionCount': 3837, 'subscribed': False, 'read': False, 'createdAt': '2018-12-28T07:06:56.597Z', 'updatedAt': '2021-07-16T06:06:19.824Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['上莊', 'hall', 'Ulife', 'Sem', '大學'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7b511f59-c288-4b99-9ab8-12b606a3221d/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/475daaef-2ff0-4a18-b92b-b662978b4534/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 107}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f04ba238-e83a-4361-a654-2bbbb62cb37e', 'alias': 'hkmacrelationship', 'name': '港澳感情事', 'description': '專屬於香港澳門討論感情事o既全匿名討論區~無論係感情難題定係啱啱分手都可以係度盡情傾訴~發文請注意需超過15個中文字', 'subscriptionCount': 5423, 'subscribed': False, 'read': False, 'createdAt': '2018-12-28T07:07:27.625Z', 'updatedAt': '2021-07-16T06:06:35.577Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': 
{'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['表白', '香港', '拍拖', '單身', '感情', 'A0'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1cd57e18-7d10-4858-a6bb-c1c07f987ec6/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d244a676-6bd1-4b58-a0d8-a485b9d54583/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 481}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ff298eca-10f6-461e-84b1-561b19375ec3', 'alias': 'hkmactwexchange', 'name': '僑生交流', 'description': '專屬於港澳台交流o既討論區,無論你係僑生、鍾意台灣o既港澳人定係鍾意港澳o既台灣人,都歡迎係呢度互相交流~發文請注意需超過15個中文字', 'subscriptionCount': 4051, 'subscribed': False, 'read': False, 'createdAt': '2018-12-28T07:07:50.290Z', 'updatedAt': '2021-07-16T06:06:52.354Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['僑生'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/258b4bf3-6cf0-460e-a5a9-d3b4f6881f09/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f502d4e1-137c-4cd7-a1db-cfa229b173da/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 68}, 'favorite': 
False, 'enablePrivateMessage': False}, {'id': '030327e0-75ec-47ba-8103-f2152a9822a0', 'alias': 'hkmacgirl', 'name': '港澳女生', 'description': '專屬於港澳姊妹們o既討論區,呢度淨係女仔可以發文、留言,無論係咩女仔專屬既話題都可以係度討論~唔會有男仔係下面留言的 XD 發文請注意需超過15個中文字', 'subscriptionCount': 3840, 'subscribed': False, 'read': False, 'createdAt': '2018-12-28T07:08:45.995Z', 'updatedAt': '2021-07-16T06:08:46.792Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['煩惱', '香港', '女仔', '心事'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2cc410d8-3d81-4fca-a30a-b83d4a56b727/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/acb0de67-271e-46b2-a2f0-094ad42e00f6/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 83}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1b0a200d-50e7-4abf-9bce-20239479d1ec', 'alias': 'afats', 'name': '空軍航空技術學院', 'description': '空軍航空技術學院板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 970, 'subscribed': False, 'read': False, 'createdAt': '2019-01-06T03:02:50.556Z', 'updatedAt': '2020-08-13T08:44:50.054Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 
'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1589e983-1a29-4758-8d4c-ff796a04079a/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/49e28d33-9194-4b60-959c-b7d6a162cd9b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '349cc671-7194-4fb9-91a2-85c09810f7eb', 'alias': 'cuhk', 'name': '香港中文大學', 'description': '中大同學既小天地~發文請注意需超過15個中文字', 'subscriptionCount': 1714, 'subscribed': False, 'read': False, 'createdAt': '2019-04-22T05:08:45.266Z', 'updatedAt': '2020-08-13T08:45:24.102Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ba918ac1-38f4-4a48-9440-fc26635c3c57/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8c678bbb-9342-40da-b6b4-2e27de10c511/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '24a9c5cb-ce6d-4091-aa1e-7918dc9d327a', 'alias': 'hklgbt', 'name': '港澳 LGBT', 'description': '呢度可以發表 LGBT 既小故事同埋討論相關議題~注意要互相尊重~發文請注意需超過15個中文字', 'subscriptionCount': 4366, 'subscribed': False, 'read': False, 'createdAt': '2019-04-25T07:01:35.991Z', 'updatedAt': 
'2021-04-20T10:20:27.501Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['性傾向', '跨性別', 'LGBT', '同性戀'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bb056490-fd70-4793-b9ba-5312a863607a/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3bba5b58-7366-48a9-96a9-ea354fb1debd/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 57}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6d26c1cc-187e-4627-b426-16f1b9483b11', 'alias': 'hkmovietv', 'name': '港澳煲劇', 'description': '呢度可以發表任何煲劇、電影、電視劇既心得~發文請注意需超過15個中文字', 'subscriptionCount': 3324, 'subscribed': False, 'read': False, 'createdAt': '2019-04-25T07:02:19.323Z', 'updatedAt': '2021-04-20T10:26:12.203Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['戲院', '睇戲', '煲劇', '電視劇', '香港'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6db798fd-1c48-433a-bf53-04481ccea2b5/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/0fdb4ae1-e6d9-4b4a-bfa3-62a2978fdd46/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 28}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f60429cb-5bda-42db-ba31-e9675293c969', 'alias': 'hkgag', 'name': '港澳搞 GAG', 'description': '呢度可以發表任何搞笑搞 GAG 既內容,甚至係潮文~發文請注意需超過15個中文字', 'subscriptionCount': 3225, 'subscribed': False, 'read': False, 'createdAt': '2019-04-25T07:04:25.679Z', 'updatedAt': '2021-04-20T10:27:32.800Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['LOL', 'memes', '搞笑', 'Gag'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e3cc14d2-3a94-41fb-b157-d6b7cbe762b8/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f689b75c-4fa0-48ad-a5bd-d69b31c7e2de/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 28}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7bca8c34-f996-4792-ad9c-3e992ca93158', 'alias': 'hkfreshmen', 'name': '港澳 Freshmen', 'description': '呢度係比今年9月入大學/大專同現任大學生交流既地方~發文請注意需超過15個中文字', 'subscriptionCount': 6145, 'subscribed': False, 'read': False, 'createdAt': '2019-05-13T05:48:19.129Z', 'updatedAt': '2021-07-16T06:07:39.046Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 
'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['JUPAS', 'asso', 'year1', 'freshman', '升學', 'DSE'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/40a3139c-9d1b-4089-9a65-ae185c36e100/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/74dc461d-7eae-4636-a22e-4cb8d4a3c899/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 221}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7ecfd6ce-a723-4d13-a8e8-f5501e5c4ef2', 'alias': 'hku', 'name': '香港大學', 'description': '港大同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 1328, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T04:34:57.095Z', 'updatedAt': '2020-08-13T08:55:20.172Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bd251e73-594d-4692-89f9-f2d3c46c8564/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9cbef1fe-2de3-4d8e-84d3-ece24a9b52d7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '20af7f52-8b6d-4d7a-8ce6-83f1e1c51e13', 'alias': 'hkust', 'name': '香港科技大學', 'description': '科大同學既小天地~發文請注意需超過 15 個中文字', 
'subscriptionCount': 826, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T04:41:00.999Z', 'updatedAt': '2020-08-13T08:49:00.991Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f9fa833f-f9ed-41f2-9f79-6c8e82cd0d61/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f1d0267c-f34c-489f-8d4a-9cc568c1e1b4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e7da7fda-c883-460f-bff2-2146441f3d10', 'alias': 'hkpu', 'name': '香港理工大學', 'description': '理大同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 1635, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T04:41:32.029Z', 'updatedAt': '2020-08-13T08:48:57.982Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/87a20258-0a7f-48ba-9fe2-f15ba5963aaf/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 
'logo': {'url': 'https://megapx-assets.dcard.tw/images/3494f1e5-88ce-47c4-8f0f-1bded88540a1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f8273a1d-5c21-48a2-9d89-23a94e2a92c5', 'alias': 'hkbu', 'name': '香港浸會大學', 'description': '浸大同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 789, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T06:28:17.423Z', 'updatedAt': '2020-08-13T08:51:58.526Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0b87e9a4-2e55-491d-a091-ec63bf70d6a5/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1b9f7af3-424e-4c79-b1db-90e486ace16f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4390f47e-0984-4974-a197-2a2d1fcb5657', 'alias': 'cityu', 'name': '香港城市大學', 'description': '城大同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 945, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T06:29:24.155Z', 'updatedAt': '2020-08-13T08:51:48.782Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 
'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b00ba85d-81db-4737-a0c9-44b436012afc/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4587eb5b-7f17-4997-86fa-d0646ca199e3/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7142b5d3-19a2-4a01-8f6c-5055f1eece9e', 'alias': 'lingu', 'name': '嶺南大學', 'description': 'LingU 同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 355, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T06:30:01.400Z', 'updatedAt': '2020-11-20T03:41:48.579Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5b518483-7b9a-406b-9adf-8887ec23d926/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6cf9c299-7a9f-46c6-9dea-e45aa12a52b6/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f8bdfe51-4000-44b3-9959-2f78107980ac', 'alias': 'syu', 'name': '香港樹仁大學', 'description': '樹仁同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 590, 'subscribed': False, 'read': False, 'createdAt': 
'2019-07-12T06:31:09.329Z', 'updatedAt': '2020-08-13T08:51:04.377Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e8661686-ee57-4ec5-bbf2-0f7e759d145b/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e058d78c-4cb7-4b0f-80e5-c3b3810ed7ab/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '084766c6-95c0-4dac-81af-4f40e76ff87b', 'alias': 'hsu', 'name': '香港恒生大學', 'description': '恒大同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 582, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T06:32:22.094Z', 'updatedAt': '2020-08-13T08:57:59.095Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fa1f53ac-4bec-4748-aee0-c4017c2a198c/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/f1996266-d928-43da-a741-d44d9ee7fec7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'bb1d1cd6-9b1d-45c8-9125-7f757c8ac525', 'alias': 'openu', 'name': '香港公開大學', 'description': 'OU 同學既小天地~發文請注意需超過15個中文字', 'subscriptionCount': 788, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T06:33:22.697Z', 'updatedAt': '2020-08-13T08:54:00.300Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/75ef920d-81ca-4088-930a-85510729ae73/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bc127413-a5f6-4232-b7ed-95b45811266c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 6}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '70fad9eb-83dc-47fe-b3c8-7897125bed31', 'alias': 'eduhk', 'name': '香港教育大學', 'description': '教大同學既小天地~發文請注意需超過 15 個中文字', 'subscriptionCount': 565, 'subscribed': False, 'read': False, 'createdAt': '2019-07-12T06:35:02.237Z', 'updatedAt': '2020-08-13T08:55:00.764Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': 
{'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6cc3a1df-bcb6-4404-b5e1-8b616af50d32/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2501}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/cb9db97f-9226-4a56-b0e6-0c865a8e734a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ce47f85e-b675-4e79-8ead-d6635add0263', 'alias': 'hkmacsex', 'name': '港澳西斯', 'description': '呢度係比香港澳門既同學仔討論 18+ 既話題,發文留言前請先閱讀板規', 'subscriptionCount': 12171, 'subscribed': False, 'read': False, 'createdAt': '2019-08-27T06:19:10.672Z', 'updatedAt': '2021-04-20T10:24:59.231Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': False, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['扑野', '18禁', 'SEX'], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0bd10d37-e6ae-4332-bbe2-a0235d874b1c/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7de12c29-1ef6-426d-a058-1329eafe2afe/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 52}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6923bba9-3396-4bdb-bd87-e7e8d7891d72', 'alias': 'survival_guide', 'name': '暗黑新生攻略', 'description': '', 'subscriptionCount': 194, 'subscribed': False, 'read': False, 'createdAt': '2019-08-29T07:54:27.047Z', 'updatedAt': 
'2019-08-29T07:54:27.047Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'bd3e5b4f-0268-4464-9a67-fdbd3db58d4d', 'alias': 'hkmacsec_hand', 'name': '港澳二手物品交流', 'description': '呢度係比香港澳門既同學仔出售二手物品既討論區,發文留言前請先閱讀板規', 'subscriptionCount': 1983, 'subscribed': False, 'read': False, 'createdAt': '2019-10-28T05:54:45.436Z', 'updatedAt': '2021-07-12T04:53:18.676Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['香港', '二手'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b816442d-21a1-456c-9d8e-11703022af02/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/aad86234-998c-4fcf-b759-4cd0046e0029/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '019f0994-375e-479a-9b08-1b8addb64cc3', 'alias': 'hkbeauty', 'name': '港澳 Beauty', 'description': '呢度係比香港澳門嘅同學仔討論化妝、護膚、美髮、任何扮靚相關話題嘅討論區,發文留言前請先閱讀板規', 'subscriptionCount': 0, 'subscribed': False, 'read': False, 'createdAt': 
'2020-01-02T03:21:28.406Z', 'updatedAt': '2021-04-20T10:15:48.549Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['減肥', '護膚', '打扮', '搽面', '化妝', '分享'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/53bbcdaa-d29a-4a8b-934d-a09d471fbfe2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/042e1ebc-6b13-4090-a9ab-7745c747d64f/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 47}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1150ae79-6e67-46f2-bfec-74f8c365a4a1', 'alias': 'hktrending', 'name': '港澳時事', 'description': '呢度係比香港澳門嘅同學仔討論同港澳有關既時事議題嘅討論區,發文留言前請先閱讀板規', 'subscriptionCount': 0, 'subscribed': False, 'read': False, 'createdAt': '2020-01-02T03:22:36.962Z', 'updatedAt': '2021-04-20T10:31:36.975Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['港聞', '正苦', '林鄭', '時事', '社會', '政治'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/91ebc571-f347-4b91-887b-0c74c2a813be/orig.jpeg', 'type': 'image/jpeg', 'width': 1792, 'height': 597}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9ae2a0de-410d-4770-a723-bfad803e83ad/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 64}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '36443e19-9334-42f4-b91e-95e601150f61', 'alias': 'hkacg', 'name': '港澳 ACG', 'description': '呢度係比香港澳門既同學仔討論同分享各種動漫、遊戲嘅討論區,發文留言前請先閱讀板規', 'subscriptionCount': 0, 'subscribed': False, 'read': False, 'createdAt': '2020-01-02T03:23:17.450Z', 'updatedAt': '2021-07-12T04:51:47.346Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['電玩', '動漫節', 'Cosplay', '動漫', 'ACG', '遊戲'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8db3e820-7ba9-4f62-be6a-51adb9076e25/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7712d81b-a049-47a7-b3fe-cfd697429543/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 36}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '8cb36269-9d1a-4044-962a-e45126b8dbda', 'alias': '2019_ncov', 'name': 'COVID-19', 'description': 'COVID-19 集中討論專區。歡迎討論防疫知識、疫情最新狀況及相關新聞。請注意:禁止散布不實訊息。', 'subscriptionCount': 138850, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:25:43.573Z', 'updatedAt': '2021-05-22T06:59:08.296Z', 'canPost': False, 'ignorePost': False, 'invisible': 
False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['校正回歸', '口罩', '防疫', 'COVID19'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/aaf83806-5e39-48f2-9341-334fcc23f3e3/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f445eb68-0dab-4892-a49e-847e59dd7e57/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3769}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'bf17fb02-a9b5-458e-a1bb-75aa854c7e41', 'alias': 'happynewyear', 'name': '新年快樂', 'description': 'Dcard 祝大家 2021 新年快樂!', 'subscriptionCount': 2816, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:25:53.573Z', 'updatedAt': '2021-04-20T08:36:40.917Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['春聯', '紅包'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d7ef6657-4cd1-4b1d-9c2e-2d03282ca9dd/orig.jpeg', 'type': 'image/jpeg', 'width': 1646, 'height': 548}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9ecec93f-21dc-48f3-bdc8-404f84c5e1f8/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': 'b7db78c7-7b32-4da5-a565-907a892259d7', 'alias': 'youtuber', 'name': 'YouTuber', 'description': '只要有手機你就是Youtuber,一起將你的作品分享給全世界吧!', 'subscriptionCount': 260715, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:26:03.573Z', 'updatedAt': '2020-10-07T10:21:01.877Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '請善用搜尋功能,不要發表相同或類似文章。\\n\\n發文前請先仔細閱讀板規,若因違反板規而被刪文或禁言,請自行負責。', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ad59c688-ad32-4b5d-b441-ee7fe8b4acfe/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5b68ba39-26c6-4a36-8a6b-e7fe9bf60c7f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1101}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '238cf3c1-2fc3-4e7f-abc5-99e3ddf9c8ad', 'alias': 'announcement', 'name': '官方公告', 'description': 'Dcard 官方發布全站公告、新功能上線、卡友活動等消息的專區。歡迎訂閱接收第一手官方資訊!', 'subscriptionCount': 17118, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:26:13.573Z', 'updatedAt': '2020-02-04T07:53:43.573Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '這裡只限 Dcard 官方發文', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': 
False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/42e89ffa-133b-49af-8b3f-37a4eefca432/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5156f0d1-c338-45bd-b090-77c7f8f0e42a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3927a13e-67e8-4a3b-b271-dea8048a0129', 'alias': 'smallgoodthings', 'name': '小事公益', 'description': '改變世界不僅僅是一個人做了很多事,而是和一群人一起做了很多小事。分享你溫暖的故事,就是最棒的小事~', 'subscriptionCount': 13009, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:26:23.573Z', 'updatedAt': '2021-04-20T08:36:41.517Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['小事公益', '領養代替購買', '環保餐具', '不浪費食物', '街賣者', '謝謝你辛苦了', '愛要及時'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/702a765e-6e6a-4592-b6da-c8656f318176/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ad342623-0e07-4930-9e5a-dda51662323c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 36}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cd2959fe-5515-4c1e-bd35-1cd4c36b44f0', 'alias': 'freshman', 'name': '新生季', 'description': '掌握以下兩個技巧,問題更有機會被解答!\\n【1.】發文前先搜尋過去是否有類似問題討論 \\n【2.】發表文章時語句友善、有禮貌', 'subscriptionCount': 94393, 'subscribed': False, 'read': 
False, 'createdAt': '2020-02-04T07:26:33.573Z', 'updatedAt': '2021-06-04T19:51:03.755Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前請先選擇分類 ⬆️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '課程學業', '新生活動', '校園生活', '打工實習', '宿舍', '交通', '社團營隊'], 'topics': ['我的線上畢典', '選系雷達', '新生季懶人包', '選校', '選系', '落點', '指考', '宿舍用品', '人際關係', '學長幫幫我', '學姊幫幫我', '統測', '新生', '大學', '推甄', '備審資料', '筆電', '高科大', '中科大'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/751f06a4-b909-468e-bcc2-62b2c418d873/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d703756f-1bbc-494f-ae12-448e2d0d4581/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3676}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '66d54270-2766-4ced-b6dc-b00ae3b58a7e', 'alias': 'graduate', 'name': '畢業季', 'description': '寫下一段關於大學四年的深刻回憶,可以是回顧每個年級的成長收獲;給未來大一新生的誠摯建議;青澀甜美的校園愛情;又或者是一段關於遺憾的故事。', 'subscriptionCount': 19387, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:26:43.573Z', 'updatedAt': '2021-04-20T08:36:52.904Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['學士服', '畢業典禮', '畢業照', '畢業歌', '畢業花束', '畢業禮物', '畢旅', '應屆畢業生', '畢業', '畢冊', '社會新鮮人', '學士帽', '學士照'], 
'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/50b1ee83-bbf3-43f9-bf46-963a0a9e2ec5/orig.jpeg', 'type': 'image/jpeg', 'width': 1801, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f0cebc64-513b-4f85-b957-41bf2cc25259/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 72}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cbd5285f-3cba-4bfc-86d0-1ab52d201459', 'alias': 'makeup', 'name': '美妝', 'description': '不管你喜歡開架彩妝還是專櫃彩妝,美妝板提供各種最新彩妝開箱評比、粉底色號、唇膏試色、眼影試色、保養品推薦等你來討論!', 'subscriptionCount': 450324, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:26:53.573Z', 'updatedAt': '2021-06-25T07:07:20.485Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '發文請記得在下一步驟加入「相關話題」或其他相關分類喲!', 'postTitlePlaceholder': '發文前請選擇標題分類,提高文章曝光度喔!❤️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '底妝', '眼妝', '唇彩', '保養', '情報'], 'topics': ['潔膚水', '防曬', '粉餅', '受受狼', '刷具', '遮瑕', '粉刺', '打亮', '眼影', '粉底', '眉筆', '粉底液', '美白', '隱眼', '蜜粉', '面膜', '眼線', '修容', '二手拍', '唇膏試色', '眉毛', '保養', '日常妝容', '唇彩', '底妝', '敏感肌', '口紅', '腮紅', '眼妝', '凹凹賞', '空空賞', '指甲油', '男生保養', '約會妝容', '開架彩妝', '香水', '醫美'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9c97615a-e424-4db5-91aa-4cfb5f03f60b/orig.jpeg', 'type': 'image/jpeg', 'width': 1801, 'height': 601}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f5ff9184-1c01-4d22-a246-20bf0b057745/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3336}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': '16b1b561-788e-4214-b4b7-e3102a35fa0d', 'alias': 'manicure', 'name': '美甲', 'description': '歡迎交流包含凝膠、甲片、美甲貼紙、指甲油,以及手部保養類型的文章。', 'subscriptionCount': 114292, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:27:03.573Z', 'updatedAt': '2021-06-04T15:25:43.045Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5c8c5c82-198f-48ce-9384-e5c004d33ca7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/98da131f-87e1-4967-909c-212d6925fca7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 500}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6d1f3e37-667a-4f84-951d-2efebf51d32a', 'alias': 'facelift', 'name': '醫美', 'description': '只要透過努力,人人都有機會成為自己想要的樣子。我們相信對美的追求是人的天性,歡迎你在這裡和大家一起交流討論,謹慎考慮自己想要什麼,選擇適合自己的,同時幫助其他人一起朝更好的方向邁進。', 'subscriptionCount': 79508, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:27:13.573Z', 'updatedAt': '2021-04-27T01:12:07.266Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '☝️要選“標題分類”,不然會違規☝️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': 
False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/eb330da3-7117-43d9-99a0-5aff4f264f57/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/eadeac9f-bf37-4f81-8d9a-28b85b433912/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 646}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1a14ba93-3989-47e5-aedc-a3918963100e', 'alias': 'fragrance', 'name': '香氛', 'description': '本板討論範圍以香氛產品為主,並包含香氛品牌之周邊商品如下:\\n1. 香水以及其周邊產品(例如:蠟燭、乳液、擴香、沐浴用品等)。\\n2. 關於香水的聞香心得、使用、消費經驗、情報分享等。', 'subscriptionCount': 101355, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:27:23.573Z', 'updatedAt': '2021-07-06T12:19:53.119Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['香水', '室內香氛', 'Jomalone', '女香', '生日禮物', 'Diptyque', '擴香', '開箱', '潘海利根', 'penhaligon', 'Lelabo', '迷香人', '女孩'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6bf538f9-8cd2-423c-b1cb-350a5abd4dc6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9603fe75-3abb-4da0-856c-1ffde84b6c5b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 366}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '62268850-d2b4-4c21-a7a3-26e71702a276', 'alias': 'hairsalon', 'name': '美髮', 'description': 
'美髮板提供各種美髮心得、染燙髮經驗分享、保養技巧等討論!', 'subscriptionCount': 159432, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:27:33.573Z', 'updatedAt': '2021-04-20T08:36:38.447Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['短髮', '燙髮', '剪髮', '護髮油', '編髮造型', '染髮', '洗髮乳', '吹風機', '護髮', '潤髮乳'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4725eaff-fd9c-488d-9e11-cf05895a5027/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/afc6beea-7617-4dbc-8127-731340ba7bc5/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 599}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '919f8537-6268-4404-b319-0cf46111ba34', 'alias': 'orthodontics', 'name': '牙齒矯正', 'description': '歡迎大大們踴躍分享矯正前、中、後的心得及矯正相關資訊。', 'subscriptionCount': 62147, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:27:43.573Z', 'updatedAt': '2020-09-14T16:44:34.441Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/3059719e-0d8d-42eb-a463-082c5719d4c8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 599}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/03106453-411b-45ac-a67a-a4a88b5f20b5/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 624}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3b54b08f-27c3-4048-9777-c40def9ac9e1', 'alias': 'contact_lens', 'name': '隱形眼鏡', 'description': '隱形眼鏡板是為了讓大家可以討論隱形眼鏡的各種話題!\\n不論是各種款式、各種國家的品牌、各種折扣優惠、心得、問題都歡迎大家在這邊討論。', 'subscriptionCount': 78109, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:27:53.573Z', 'updatedAt': '2020-09-14T06:42:23.773Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cb117f18-ca43-4f5d-adfd-df228975f24f/orig.jpeg', 'type': 'image/jpeg', 'width': 738, 'height': 246}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/dcbccd0b-d7b5-4978-bbf8-f3349fc5fde1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 177}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'be1a095b-175e-4523-9e06-66a05d939676', 'alias': 'dressup', 'name': '穿搭', 'description': '穿搭板提供各種服裝搭配、包鞋、飾品配件等相關話題討論。\\n歡迎分享自己的日常穿搭,或任何潮流相關話題也可以在此發文詢問!', 'subscriptionCount': 534417, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:28:03.573Z', 'updatedAt': '2021-04-20T08:36:37.330Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 
'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '日常', '正式', '情侶', '鞋款'], 'topics': ['蝦皮', '耳環', '襯衫', '工裝', '後背包', '寬褲', '淘寶', '涼鞋', '洋裝', '情侶穿搭', '鞋子', '韓系穿搭', '背包', '眼鏡', '白襯衫', '牛仔褲', '腰包', '日系穿搭', '餅乾鞋', '外套', '背心', '長夾', '古著', '女生穿搭', '男生穿搭', '日常穿搭', '肉肉女穿搭', '雨衣', '小隻女穿搭', '真理褲', '校園穿搭', '帆布鞋', '歐美風', '球鞋', 'Converse', '中性穿搭', '顯瘦穿搭', '婚禮穿搭', '泳裝', '約會穿搭', '罩衫', '重複穿搭', '姊妹穿搭', '復古風'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/face819a-591d-4636-8eab-86529e6b07f0/orig.jpeg', 'type': 'image/jpeg', 'width': 1801, 'height': 601}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/cd6b2192-5a0a-4dad-b571-2f0ca051a646/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1369}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2c2bb5a2-bd09-4cd1-842c-4ec78ff0b1c5', 'alias': 'sneakers', 'name': '球鞋', 'description': '歡迎每一位喜歡球鞋、研究球鞋的卡友們善用這平台 : )\\n發文前請記得把版規看仔細唷!\\n不驗鞋、不驗賣家,若要推薦賣家也請注意哦⚠️', 'subscriptionCount': 67883, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:28:13.573Z', 'updatedAt': '2021-04-20T08:36:38.236Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前⬆️先選擇標題分類吧☺️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['NIKE', 'ADIDAS', 'REEBOK', 'PUMA', 'CONVERSE', 'NB', '聯名'], 'nsfw': False, 'mediaThreshold': 
{}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/14a7be8d-2509-4c32-9784-9181f35af458/orig.jpeg', 'type': 'image/jpeg', 'width': 1394, 'height': 464}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3c5cae10-e860-4ac4-aba1-ade9904460a8/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 215}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd5ee9d02-510e-4f81-96e9-ab59b56e21ca', 'alias': 'buyonline', 'name': '網路購物', 'description': '網路購物板主要提供線上購物之經驗分享與網購教學討論。\\n或是在網購前中後遇到問題也能在此發文尋求卡友的幫忙。', 'subscriptionCount': 169904, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:28:23.573Z', 'updatedAt': '2021-04-20T08:36:41.949Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['教學', '發問', '集運', '心得'], 'topics': ['網購教學', '淘寶', '退貨', '蝦皮', '支付寶', '賣家', '集運運費', '官方集運', '私人集運', '購物金', '物流', '假貨'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5e683d3a-617a-4ea4-a076-1d9119aca9bf/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c0a8a080-ab94-4289-9d96-3ac948672da9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1807}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c573dca4-8afd-4c56-8c53-4402ead7bdad', 'alias': 'boutique', 'name': '精品', 'description': '歡迎交流有關精品包包、小皮件、首飾、衣服、鞋子等商品,與到精品專櫃、精品店購物的過程分享。', 'subscriptionCount': 121804, 
'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:28:33.573Z', 'updatedAt': '2020-09-14T06:35:01.047Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/79fc93cc-f622-4427-8cc0-2162d82cd832/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bc113ec1-602f-4f85-b90d-939b9f0e5b17/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 350}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '42851318-b9e2-4a75-8a05-9fe180becefe', 'alias': 'relationship', 'name': '感情', 'description': '無論是遠距離戀愛、情侶間的有趣互動、分手後的藕斷絲連等...都可以在感情板分享你們的愛情故事,找到愛情路上的共感。', 'subscriptionCount': 550783, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:28:43.573Z', 'updatedAt': '2021-04-20T08:36:40.391Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['曖昧', '閃光', '劈腿', '失戀', '分手', '告白'], 'topics': ['微西斯', '愛情', '閃光', '價值觀', '告白', '分手', '遠距離', '失戀', '曖昧', '做愛', '在一起', '學長', '交友軟體', '七夕', '單身', '渣男', '第一次', '同居', '暗戀', '復合', '木頭男友', '綠帽', '撒嬌', '劈腿', '大叔', '脫魯', '同事', '情人節', '菜桃貴', '一個人', '生理期', '約會', '月老', '警察', '熱戀期', 
'學妹', '旅行', '生日禮物'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b7d2dc32-cccb-4afa-8291-781164fd4691/orig.jpeg', 'type': 'image/jpeg', 'width': 7500, 'height': 2500}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c99966a1-03f9-4a69-86d4-df979a970496/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 11362}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '75a726e6-d4e3-4902-a410-2430a39fffcb', 'alias': 'mood', 'name': '心情', 'description': '提供分享生活情緒、抒發心情或交流各種情緒處理的經歷故事。在這裡你可以安心匿名,用無壓力的書寫方式與卡友分享你的生活點滴。', 'subscriptionCount': 263890, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:28:53.573Z', 'updatedAt': '2021-04-20T08:36:40.189Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['女大十八變', '租屋糾紛', '畢旅', '感動的事', '一句晚安', '想念你', '謝謝你', '靠北', '勵志', '情緒勒索', '霸凌', '憂鬱症'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b34fa540-50a5-4f8e-ab24-9fc7b14c1b7d/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f1be2fc4-d860-4853-a7df-0a9fe0a2dc38/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 9109}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '255fd275-fec2-49d2-8e46-2e1557ffaeb0', 'alias': 'talk', 'name': '閒聊', 'description': '閒聊板提供各種生活周遭大小事的討論,無論是半夜睡不著想找同好,甚至是戴牙套遇到的困擾等...都可在此發文。', 'subscriptionCount': 330688, 
'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:29:03.573Z', 'updatedAt': '2021-04-20T08:36:35.632Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['醫療', '法律'], 'topics': ['網美媽媽', '廢墟探險', '畢旅', '童年回憶', '泰國浴', '租屋', '牙套', '法律', '困擾', '醫療'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3eac71d2-89ea-4687-baae-49bd0a24d90d/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/dfe250b0-f60c-420a-a099-5eafd107ae31/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 6650}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a1aaa6e6-2594-4968-b7dc-e1b14bea96f4', 'alias': 'funny', 'name': '有趣', 'description': '有趣板歡迎發表任何自己或親友的耍笨事蹟!各種好笑、傻眼、母湯的生活趣事或笑話大全通通都可以在此發表。', 'subscriptionCount': 283557, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:29:13.573Z', 'updatedAt': '2021-04-20T08:36:37.118Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['笑話', '梗圖', 'Wootalk', '愛莉莎莎', '黃金12猛漢', '撩妹', '微西斯', '貼圖', '網紅', '火柴人', '您還親切的問我要冰還熱', '傻眼', '奶奶好可愛', '這年頭的香腸都喜歡玩躲貓貓', '搞笑', '童年回憶', '撩妹語錄', '糗事', '惡搞', '接龍', '宿舍生活', '有趣日常'], 
'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/99728ad7-2b6e-47de-b243-7d83ab34aa8d/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d45f9407-d676-4e38-9da3-2a99cf2f0895/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1292}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '73c0282a-f7ef-4300-bf66-57e97bc0fc27', 'alias': 'joke', 'name': '笑話', 'description': '歡迎分享各種類型的笑話、梗圖、meme,不管是好笑的、冷場的、能讓人引發思考的,或者是諷刺社會時事都可以分享。', 'subscriptionCount': 179987, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:29:23.573Z', 'updatedAt': '2021-04-20T08:36:39.292Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['地獄梗', 'meme', '梗圖', '冷笑話'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a6a4e947-e535-492c-9deb-b439c6073002/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fed6bbaf-add1-4ecf-9e0c-654448ba2a06/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 72}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '06e07db4-c8b5-4dae-851f-e8a8236e770c', 'alias': 'meme', 'name': '梗圖', 'description': '梗圖=有梗的圖', 'subscriptionCount': 422567, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:29:33.573Z', 'updatedAt': 
'2020-08-31T09:47:51.769Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/926dcd27-8107-4745-a07f-3c07a4baa93d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/296d5970-41ae-4054-8c73-59171072a0a3/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2668}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f72e3b1d-3c9a-4fec-8a61-41c76cc317af', 'alias': 'girl', 'name': '女孩', 'description': '專屬女孩的討論版,提供和女生有關的話題討論。也能在這裡匿名分享、抒發、詢問遇到的困擾,就像有一群閨蜜陪你度過每一天!', 'subscriptionCount': 269475, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:29:43.573Z', 'updatedAt': '2021-04-20T08:36:37.575Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['購物', '髮型', '心事'], 'topics': ['心事', '男友', '比基尼', '除毛', 'WhatsInMyBag', '內衣', '家人', '發胖', '桌布', '懷孕', '處女膜', '減肥', '第一次', '健身', '相處', '改造', '約炮', '月經', '小胸女孩', '手機桌布', '私密處', '疫苗', '霧眉', '念珠菌', '分手禮', '陰道冠', '豐胸', '韓服', '精品', '大胸女孩', '牙套', '夜店', '後背包', '皮膚科', '短夾', '棉條', '吊飾', '雙眼皮', '無肩帶內衣'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 
'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a2b478f6-b7fc-4ea8-bb1b-38154772a736/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0bba80d2-1a43-4f58-ac9a-a195e049a6a4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2939}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'db901754-0902-4229-bd04-362f72e4ae8a', 'alias': 'menstrual', 'name': '生理用品', 'description': '本板為生理用品板,舉凡國內外女性月經期間使用的產品,例如衛生棉、布衛生棉、衛生棉條、月亮杯、月事內褲、月事海綿,皆屬於本板討論範圍。', 'subscriptionCount': 29239, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:29:53.573Z', 'updatedAt': '2020-09-18T15:12:01.399Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '請於本版討論範圍內,遵守版規發文\\n注意:避孕、情趣用品和私密保養非本版範圍', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b6c87108-04ff-4392-bca3-1630a5d42f7f/orig.jpeg', 'type': 'image/jpeg', 'width': 993, 'height': 331}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/094757ab-6d1a-45a9-980a-8340d662824c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 33}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f11e8d02-6756-4376-9db3-e1cca4d2a66c', 'alias': 'marriage', 'name': '結婚', 'description': '無論是還沒有要結婚、正在結婚的路上、婚姻的酸甜苦辣...等,都可以在結婚板分享交流,讓 Dcard 陪你一起結婚吧!', 'subscriptionCount': 104826, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:30:03.573Z', 'updatedAt': '2021-04-20T08:36:42.069Z', 'canPost': 
False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['婚姻生活', 'OhMyWedding', '寶寶日記', '婚紗', '海外婚禮', '婚禮小物', '我是媽媽', '聘金', '求婚', '懷孕', '裸婚', '外遇'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a366d316-3458-4123-9091-929ee1fe884b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/eb5ab644-4eb2-403c-aa89-1494d06f472e/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 383}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c1f60d65-4f49-4a56-9c00-f162c93e31a9', 'alias': 'parentchild', 'name': '親子', 'description': '無論是還沒懷孕、正在懷孕、教養孩子,親子之間的喜怒哀樂,都可以在親子板分享交流,讓 Dcard 與你一同成長吧!', 'subscriptionCount': 45665, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:30:13.573Z', 'updatedAt': '2021-04-20T08:36:36.023Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['教養問題', '懷孕', '寶寶日記'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/69dcfa76-77e8-44e6-8d16-8b8cc5cf7338/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/73218f2c-48b8-4ff5-996a-5c7a1da8a0a2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 535}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b5b2653a-6304-4564-9ea0-a4cec0be7aee', 'alias': 'rainbow', 'name': '彩虹', 'description': 'Love Wins!專屬彩虹(LGBT)們的討論板,在這裡可以用最無壓力的方式分享你們的故事。', 'subscriptionCount': 175839, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:30:23.573Z', 'updatedAt': '2021-04-20T08:36:36.801Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['心情', '議題'], 'topics': ['微西斯', '高馬尾和長直髮', 'PPL', '早知道系列', 'Les', '天菜老師', '總在夜半消失的室友學長', '甲', 'TPL', '百合小說', '床上運動', '發情的公狗', 'Vincent', '告白', '直女', '師生戀', '交友軟體', '暖暖日常', '直男', '我們來認真的談一場戀愛', '喬薇', '長髮冰山學姐X短髮忠犬學妹', '出櫃', '性事', '姐姐很可愛', '同婚', '深櫃', '做愛', '天菜', '初吻', '跨性別', '無性戀'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/11289bd9-c722-4072-9e61-74b2184947fd/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8cf206ab-0dd6-4b5b-92b0-56b1daeac943/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2869}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c177f426-2627-4050-b989-eedc3371e610', 'alias': 'trans', 'name': '跨性別', 'description': '歡迎交流跨性別相關話題及文章。*只要您有性別光譜上非二分法的所有大小事都可以和大家分享討論*請先詳閱站規與板規的發文規則', 'subscriptionCount': 16039, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:30:33.573Z', 'updatedAt': 
'2021-07-22T04:47:27.038Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d7ef0669-02c1-4b50-a49d-75d3edba302d/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/33bd1bae-3d64-4e1f-9426-facb74398dc4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 47}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6754ab63-036c-49ba-bfa4-b95945046e29', 'alias': 'otokonoko', 'name': '偽娘', 'description': '歡迎交流偽娘的穿搭及妝容與女裝的心路歷程分享,也歡迎女生們發文給予偽娘建議與鼓勵。', 'subscriptionCount': 23846, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:30:43.573Z', 'updatedAt': '2021-04-20T08:36:36.613Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['偽娘'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/239e127d-59ee-45e8-900b-550775467b69/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/bdb900a2-be4b-4f90-be54-4e18ce1bbcea/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 67}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '26e4252c-63d8-401b-82a9-110fe1e306db', 'alias': 'lesbian', 'name': 'Les', 'description': '屬於 Lesbians 的小天地。', 'subscriptionCount': 29689, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:30:53.573Z', 'updatedAt': '2021-04-20T08:36:49.608Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '1. 請至少15字勿填充\\n2. 避免發表個人社群帳號或交換個資', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['板規', 'Les'], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d2caa863-122d-4082-8887-4b2500e28f02/orig.jpeg', 'type': 'image/jpeg', 'width': 1170, 'height': 390}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8783002d-bbf9-4f35-bf11-23cc83ec0101/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 132}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '14f573f5-3f02-4821-8e18-f561d7b7db91', 'alias': 'entertainer', 'name': '追星', 'description': '追星板提供討論、詢問、分享明星藝人的相關話題。\\n國內外藝人的最新資訊也歡迎在此發表,小心被各種安利文圈粉囉!', 'subscriptionCount': 155444, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:31:03.573Z', 'updatedAt': '2021-04-20T08:36:36.188Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 
'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['EXO', 'Apink', 'ITZY', '周子瑜', 'SEVENTEEN', '手燈', 'NCT', '賴冠霖', 'AB6IX', 'IU', '武林', 'BTS', 'YouTuber', '專輯', 'TWICE', '王一博', 'MAMAMOO', '韓星', 'GOT7', '姜丹尼爾', 'PRODUCEX101', '防彈少年團', 'RedVelvet', 'IZONE', 'GFRIEND', 'BTOB', 'Somi', 'StrayKids', 'IOI', '曹承衍', 'AB6IX', 'NUEST', 'SuperJunior'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e5d3c645-77ef-443b-bf61-81201b3c5cf9/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c75cd7bd-50b2-46e3-bebc-641d978c30d3/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1430}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9e4476fc-d1b4-4490-a578-fc4dc95ffc52', 'alias': 'bts', 'name': 'BTS', 'description': '怎麼了?\\n我們看起來像是在Billboard Hot 100上蟬聯兩週冠軍,接著又連續蟬聯兩週亞軍之後再次得到冠軍和亞軍後,又同時以Savage Love和Dynamite登上冠軍和亞軍,然後在Billboard Music Awards上連續四年獲得Top Social Artist獎之外Dynamite又被葛來美提名入圍Best Pop Duo/Group Performance後再以抒情曲Life goes on和Dynamite回歸Billboard Hot 100冠軍及季軍之後又以Butter再次獲得Billboard Hot 100冠軍並蟬聯整整七週後以Permission To Dance接力得到冠軍的歌手的粉絲嗎?\\n\\n對啊\\n我們就是ヾ(◍°∇°◍)ノ゙', 'subscriptionCount': 46800, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:31:13.573Z', 'updatedAt': '2021-07-19T17:15:53.621Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請記得先詳閱板規後再發文!請記得先詳閱板規後再發文!請記得先詳閱板規後再發文!霸拖•᷄ɞ•᷅', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['米糕麻麻的公告', '阿米手冊', '板規說明'], 'nsfw': False, 'mediaThreshold': {}, 
'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/dd644eca-f434-40e9-a6ce-c74b3e75eb2a/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 360}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/24095963-d4e1-4b34-8820-070a16e8cbe0/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 276}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1c1ff244-2bc3-4ebb-aa49-cfe93abf890f', 'alias': 'akb48_group', 'name': 'AKB48 集團', 'description': '板名為AKB48Group,所以討論範圍請記得以整個48系為主(如:AKB、HKT、JKT、MNL、TeamTP...等)', 'subscriptionCount': 4402, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:31:23.573Z', 'updatedAt': '2020-10-08T08:38:43.704Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7ff87829-0666-458d-aed2-e01c762d738a/orig.jpeg', 'type': 'image/jpeg', 'width': 588, 'height': 196}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/46ae7c3d-842c-4732-af3d-41ae9287fd2d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 27}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'dbf7bcf4-91be-4c9c-b8f1-b67c70cb9559', 'alias': 'nogizaka46', 'name': '乃木坂 46', 'description': '歡迎蒞臨乃木坂46板,只要跟坂道有關的資訊,都可以在本板發文討論喔~發文時請詳閱本板全部板規。', 'subscriptionCount': 5977, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:31:33.573Z', 
'updatedAt': '2021-04-20T08:36:45.555Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文時請選取右上角的標題分類(選取後毋需再重複打上標題分類),並詳閱板規內之發文相關規定。', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['乃木坂46板公告', '乃木坂46', '日向坂46', '櫻坂46', '欅坂46'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2d06189b-b063-4f6b-a4e0-83593457ee08/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 360}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/28a65113-b4ee-4021-a1f9-a878df984c4c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 74}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6cf320df-3f3a-495f-9764-3dfbef767c49', 'alias': 'korea_star', 'name': '韓星', 'description': '本板討論事項為大韓民國相關演藝人員(練習生、舞者、選秀節目參賽者等),非為本板討論對象者,一律刪文處理。\\n\\n討論來源為新聞、影音、節目內容等確定來源之消息,也歡迎多多推廣自己喜愛的偶像明星,請勿擅播不實之謠言。\\n\\n本板除了本身板規外,也依照Dcard全站站規處理相關檢舉。\\n\\n歡迎各位多多利用韓星板,多多交流', 'subscriptionCount': 62149, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:31:43.573Z', 'updatedAt': '2020-11-03T07:47:40.472Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 
'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/994fcf8f-ec3b-4823-83a3-6fde1681c55e/orig.jpeg', 'type': 'image/jpeg', 'width': 640, 'height': 213}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4e3e5ef1-262d-440b-a012-87620eba4f42/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 100}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '56fa9926-2412-47af-9046-194dd4d8f51c', 'alias': 'exo', 'name': 'EXO', 'description': \"WE ARE ONE!❤️EXO 사랑하자❤️\\n歡迎來到EXO板,發文和留言前請先詳閱板規‘ㅅ'\", 'subscriptionCount': 19849, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:31:53.573Z', 'updatedAt': '2021-07-14T04:33:45.438Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請選擇右上角的標題分類\\U0001f9cf🏻\\u200d♀️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['EXO', 'WeAreONE'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e57d3aff-9def-42c9-8b48-0ff4407b1918/orig.jpeg', 'type': 'image/jpeg', 'width': 1170, 'height': 390}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5eb6e3da-e234-467c-966c-97478da89e14/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 53}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2238d924-11b4-4bda-bd92-53b87d92eecc', 'alias': 'mayday', 'name': '五月天', 'description': '歡迎各位五迷們一起討論分享關於五月天的資訊、音樂、演唱會、電影、書籍等作品,也請大家遵守版規,共同打造良好的討論風氣喔!', 'subscriptionCount': 20718, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:32:03.573Z', 'updatedAt': '2021-04-20T08:36:57.515Z', 'canPost': False, 
'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '#提問 #分享 #心得', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['討論區', '五月天演唱會', '五迷'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3c5d98e2-3b0f-47d4-88ca-e8cd13791fc0/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 360}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bd3b4060-e330-4667-9745-5646ca57fe42/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '414fa98a-04ec-4a13-bf21-fd63d7119726', 'alias': 'mjj', 'name': '麥可傑克森', 'description': '歡迎進入麥可傑克森的 Dcard 看板,這裡提供用戶發佈有關麥可傑克森的資訊,文章發佈前請先瀏覽板規!', 'subscriptionCount': 1986, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:32:13.573Z', 'updatedAt': '2020-10-08T08:39:14.410Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/baaec8d9-1486-49f4-9e71-4b0d43e15964/orig.jpeg', 'type': 'image/jpeg', 'width': 1527, 'height': 509}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/08b18a39-4f0a-4952-a1a7-28fa15d96e94/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c82dae3f-28ba-4aae-961d-c754e6ccd37a', 'alias': 'handicrafts', 'name': '手作', 'description': '手作板提供各種大小道具、羊毛氈針織、鋼筆、手帳、插畫素描水彩或任何和DIY手作有關的文章都歡迎在這裡發表哦!', 'subscriptionCount': 134097, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:32:23.573Z', 'updatedAt': '2021-04-20T08:36:36.707Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'large'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['手創', '廢物利用', '繪畫', '紙藝', '鋼筆', '工藝'], 'topics': ['乾燥花', '爆炸盒', '袖珍屋', '羊毛氈', '飲料提袋', '模型', '手寫', '鋼筆', '手繪', '電繪', '摺紙', '縫紉', '刺繡', '毛線', '手帳', '黏土', '雕刻', '噴漆', '印章', '插畫', '木工', '金工', '水彩', '素描', '皮件', '編織', '書法', '袖珍'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/39e492e6-a8eb-4e08-a7e8-d4954bbee845/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6b283fd1-9af6-4e99-897c-fad7369bcdb2/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 406}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '283fe663-ed14-4f3b-b472-1fbdd7c26b00', 'alias': 'illustration', 'name': '插畫', 'description': '歡迎交流任何形式與插畫(例如素描、水彩、圖文創作、手繪、電繪等)相關類型之文章。', 'subscriptionCount': 97301, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:32:33.573Z', 'updatedAt': '2020-09-14T06:40:55.384Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3156df44-c80d-4473-b7da-3d10bde671be/orig.jpeg', 'type': 'image/jpeg', 'width': 1030, 'height': 343}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c201e2ae-1307-483b-8ef5-6e1dc63c5d87/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1562}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'fb66dd2c-8b39-4bc0-a7a5-29bca6b215af', 'alias': 'design', 'name': '設計', 'description': '不論是讀視傳、室設、景觀、建築等設計科系的同學們,或是單純對設計有熱情的朋友,都歡迎來這裡分享自己的作品和專業!', 'subscriptionCount': 60766, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:32:43.573Z', 'updatedAt': '2020-10-13T16:50:03.121Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文請選標題分類、話題!\\n例:#分享 網格排版設計作品~', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6131ddf4-5ed3-4772-8aa1-fa37a9c75a40/orig.jpeg', 'type': 'image/jpeg', 'width': 735, 'height': 245}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4c66b148-a617-4f1d-9de0-9811f9e4da8f/full.jpeg', 'type': 
'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 115}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b6d3c83f-4799-4154-98cb-a7e2437d5b22', 'alias': 'caligraphy', 'name': '手寫', 'description': '筆跡,是偶然的春暖花開,必然的源遠流長。', 'subscriptionCount': 37478, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:32:53.573Z', 'updatedAt': '2021-04-25T08:00:03.533Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['手寫', '手寫的溫度', '鋼筆', '新詩', '詩詞'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4e8f2198-fce6-4493-975d-6931d1798c93/orig.jpeg', 'type': 'image/jpeg', 'width': 1125, 'height': 375}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e26bafd0-26a0-467a-9283-669ed13ef3f9/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 100}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f74a0662-cb27-4d45-a6b7-7f87d3d02396', 'alias': 'stationery', 'name': '文具', 'description': '歡迎各位文具愛好者加入ლ(‧´ェ`‧ლ)\\n小心不要踩到守門的板龜\\n違反規定的夥伴們就...✂️嘿嘿嘿', 'subscriptionCount': 56228, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:33:03.573Z', 'updatedAt': '2021-05-16T05:07:56.046Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '這裡是文具板的標題 (๑•̀ω•́)ノ', 'postTitlePlaceholder': '請在上方選擇 #活動 #心得 #問題 #分享 #知識 #求救 分類', 
'ipCountryCondition': {}, 'subcategories': [], 'topics': ['文具板', '文具控', '文具展', '文具', '鋼筆', '手帳', '紙膠帶', '拼貼'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/687e4043-634b-4811-a684-b8784cdc3260/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d49a2060-c355-410a-a228-b44658a22f7a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 75}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3efb479b-3298-44bb-b56f-2090b1a7e2df', 'alias': 'toy', 'name': '玩具', 'description': '歡迎交流包含公仔、玩具、模型、模改、扭蛋、一番賞,以及娃娃收藏類型的文章。', 'subscriptionCount': 30695, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:33:13.573Z', 'updatedAt': '2020-09-14T06:38:36.037Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0b039763-de67-4afc-b207-1dd17a50e1d1/orig.jpeg', 'type': 'image/jpeg', 'width': 1420, 'height': 473}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a35fcf45-81e1-45fd-977b-3beb889a2942/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 128}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '889351b2-42a4-44ff-bccf-c5d0c45b116c', 'alias': 'lego', 'name': '樂高', 'description': 
'樂高板成立啦!主旨就是希望多個能互相交流作品,分享即時資訊的空間。\\n\\n希望大家能開心的在版上熱切交流,盡情討論moc的無限可能!\\n\\n板規會依照特殊需求而不定期修改喔。', 'subscriptionCount': 17929, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:33:23.573Z', 'updatedAt': '2020-09-24T09:52:28.140Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f1e82792-5c4d-4eb9-bf2d-3497371501bc/orig.jpeg', 'type': 'image/jpeg', 'width': 682, 'height': 227}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d8f1fbcc-66a4-4640-9885-1c3761d70464/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 51}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5b964198-2e59-419d-b44c-0276d0fea7e8', 'alias': 'house', 'name': '居家生活', 'description': '居家生活板以家或個人空間出發,舉凡室內設計、空間風格、裝潢、what’s in my room、收納技巧甚至租屋注意事項等實用資訊分享,或可提升居住品質的相關內容皆可在此討論!', 'subscriptionCount': 190291, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:33:33.573Z', 'updatedAt': '2021-04-20T08:36:40.284Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['WhatsInMyRoom', '居家佈置', '空間風格', '租屋', '室內香氛', '家具', '輕裝潢', '收納', '租屋糾紛'], 'nsfw': False, 'mediaThreshold': {}, 
'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a3abad1a-69fc-4234-992f-74208538977c/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e88310f9-8a54-433b-a27f-f72e9760f8ef/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1017}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a1f2e74c-16e2-47ef-9f29-845c121ac53e', 'alias': 'architecture', 'name': '建築', 'description': '希望建築系或者對建築有興趣的朋友能夠分享自己的設計,圖面教學,等等,互相交流進步,互相協助,讓自己的設計和興趣能夠有地方分享', 'subscriptionCount': 22305, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:33:43.573Z', 'updatedAt': '2021-04-20T08:36:48.328Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '記得在標題加上 #問題、#教學 之類的較好分類喔', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['建築轉學考', '建築軟體', '事務所'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d8b06f3c-a476-4acf-ae6a-44193436d361/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0b28a056-e4c7-4585-b607-9bc2e343fce4/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 37}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '387dcbe1-da6d-49f1-a31e-1301df4f6d04', 'alias': 'aromatherapy', 'name': '芳療', 'description': '精油、純露、花精、植物油、芳療推拿、芳療按摩等芳療行為及芳療產品使用為本板討論範圍。', 'subscriptionCount': 16832, 'subscribed': False, 'read': False, 'createdAt': 
'2020-02-04T07:33:53.573Z', 'updatedAt': '2021-07-04T20:14:26.505Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8ae5fa13-60a5-4588-b2e4-877d7f402066/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f0f4a12b-de3e-4ec4-a8a1-76a093313e8c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '49eed176-a7ad-492c-b6cf-77733ab633ef', 'alias': 'ecolife', 'name': '無痕生活', 'description': '無痕生活板提供無痕飲食使用心得、環保低碳及減塑生活等經驗分享。\\n環保政策、環境問題與剩食議題有不同想法都能在此發文討論!', 'subscriptionCount': 32203, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:34:03.573Z', 'updatedAt': '2021-04-20T08:36:50.830Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喔!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['限塑政策', '不鏽鋼吸管', '玻璃吸管', '食物浪費', '無痕飲食', '月亮杯', '剩食餐廳', '衛生紙', '回收', '環保杯套', '食物袋', '徒步環島', '保鮮盒', '衛生棉條', '世界地球日'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ad71ab91-1910-47c9-9cb5-c67cdd352c0b/orig.jpeg', 
'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e4534810-a7e8-494b-b69c-9caa8bd307d2/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 37}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7ee21581-1307-4ba9-b9dc-82028bdcca49', 'alias': 'pet', 'name': '寵物', 'description': '寵物板無論是貓狗、毛小孩或任何養其他寵物的經驗都可以在此討論,另外像是寵物協尋或動物醫院的分享也歡迎發文!', 'subscriptionCount': 231298, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:34:13.573Z', 'updatedAt': '2021-04-20T08:36:39.170Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'large'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '協尋', '狗', '貓', '小動物', '爬蟲', '水族'], 'topics': ['領養代替購買', '米克斯', '貓', '狗', '柯基', '柴犬', '認養', '貓咪真的很可愛', '動物醫院', '寵物美容', '結紮', '寵物店', '鳥', '兔', '魚', '刺蝟', '植物', '驅蟲', '疫苗'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/078d5aff-eb34-48e9-b220-d61ddd285843/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7608bd5b-19a5-4289-9be7-2479a55bba5a/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1708}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '57a329de-1999-425b-92cf-1e750dbede3b', 'alias': 'show_cats', 'name': '曬貓', 'description': '你家的貓曬起來!', 'subscriptionCount': 88917, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:34:23.573Z', 'updatedAt': '2020-04-09T04:34:50.613Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 
'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a7e58581-29d3-47c2-9fe4-9076c0165323/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 67}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d55a91aa-6115-4ff0-8058-5f38535e7968/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 319}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd0cd5819-4627-4b5f-8a8e-050976e0fba1', 'alias': 'photography', 'name': '攝影', 'description': '歡迎大家來攝影版交流攝影作品、技巧教學、器材討論、攝影話題閒聊!請大家發文/留言前先看一下版規避免違規噢!', 'subscriptionCount': 125796, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:34:33.573Z', 'updatedAt': '2021-07-09T17:13:46.225Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '發文前請看一下版規!記得加上話題跟分類!有發文字數限制喔!', 'ipCountryCondition': {}, 'subcategories': ['底片', '手機', '單眼', '器材'], 'topics': ['夜景', '底片', '手機攝影', '攝影作品', '單眼', '鏡頭', '人像攝影', '街拍', '快門', '調色', '修圖', '相機', '照片', '日出', '街頭攝影', '作品', '旅遊', '疫情', '底片相機', '新手'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3c5a1300-de76-4023-8997-8a0c4ee6ed3b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/32e714dc-a38d-44f0-8ea5-0a31ad5a4c95/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 444}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4736baba-5375-40b6-8a1b-f3a24be6bf24', 'alias': 'plant', 'name': '植物', 'description': '歡迎大家一起討論所有跟植物相關的話題(*`▽´*)\\n\\n發文未加 #分類 者經板主提醒後6小時內未訂正即刪文\\n\\n多肉標籤包含仙人掌和空氣鳳梨以及山地玫瑰\\n蕨類標籤包含鹿角蕨、蘚苔與食蟲植物和其它蕨類\\n請益標籤包含問植物名和植物生病、有蟲蟲等該如何解決\\n科普標籤包含如何照顧植物、該植物介紹等等\\n單純想聊天就選閒聊,但是閒聊但內容要跟植物相關\\nHave a wonderful day!', 'subscriptionCount': 23537, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:34:43.573Z', 'updatedAt': '2021-07-10T08:05:31.063Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前先看板規,然後沒有選標題分類會刪文唷', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['多肉', '蕨類', '請益', '多肉植物', '園藝', '新手', '仙人掌', '居家佈置', '盆栽'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8efdd584-f4d2-4898-8b4a-ee8c9e577693/orig.jpeg', 'type': 'image/jpeg', 'width': 640, 'height': 214}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5dd13008-ff6d-43bd-b817-0e9a965f8560/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 159}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd208d1e2-6f14-429b-97b7-75a2dd4f761c', 'alias': 'vehicle', 'name': '汽機車', 'description': '本板提供汽機車相關話題或部品討論,以及各種中古車或交通法規的問題也可在此發文詢問!', 'subscriptionCount': 116922, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:34:53.573Z', 'updatedAt': '2021-04-20T08:36:39.897Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['保養', '改裝', '購車選擇', '配件部品', '車禍處理'], 'topics': ['機車', '汽車', '中古車', 'Gogoro', '二手車', '安全帽', '機車駕照', '汽車駕照', '行車記錄器', '改裝', '機油', '車禍', '車牌', '駕照', '監理站', '重機', '加油', '輪胎', '租車', '煞車', '路考', '國道', '引擎'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/efb95ee2-7cd7-4f70-94b6-2d8977f2fe63/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5ddf8ebd-a5ee-46e0-b8b2-264e4fea94a4/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1218}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2cffc289-d332-4d4e-95a7-cae58a1778aa', 'alias': 'heavy_motorcycle', 'name': '重機', 'description': '歡迎大家來到大型重型機車與白牌檔車的天地❤️\\n請大家遵守板規喲~', 'subscriptionCount': 63811, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:35:03.573Z', 'updatedAt': '2021-04-30T07:31:01.645Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['重機', '檔車', '輕檔車', '重機板', '機車'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/bad95514-4354-47a9-a16b-ba9ee70b3dec/orig.jpeg', 'type': 'image/jpeg', 'width': 800, 'height': 266}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/41b20f51-f728-4775-894c-c4fb2381e659/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 163}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5e4df604-ce17-49d0-a7bf-621022e76fc0', 'alias': 'aviation', 'name': '航空', 'description': '本板以“分享”為主,討論航空職業、飛行經驗等等航空資訊。歡迎前輩分享,充實航空板。', 'subscriptionCount': 44654, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:35:13.573Z', 'updatedAt': '2021-06-29T13:13:07.360Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '👆🏻👆🏻👆🏻標題請分類👆🏻👆🏻👆🏻', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['飛機餐', '飛行紀錄', '地勤', '空服員', '航空', '培訓機師'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f0387d7e-fa69-429b-a067-e09eb842ca53/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ab3d3afc-e6d3-4735-a6c8-087576c289cd/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 30}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '797a4641-1897-45b1-ba4f-3352df8601e3', 'alias': 'railway', 'name': '鐵道', 'description': '本板討論範圍涵蓋傳統鐵路、高速鐵路、捷運、輕軌、有軌電車等各式各式軌道運具', 'subscriptionCount': 10150, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:35:23.573Z', 'updatedAt': '2020-11-03T13:45:34.627Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 
'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'video', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e02a1d25-7d11-4498-81a6-148dab514c6a/orig.jpeg', 'type': 'image/jpeg', 'width': 1334, 'height': 444}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/14ac47d3-f016-48e0-aa6e-fb3241ea7cea/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 17}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c23878c8-fbce-4ccf-96f3-8af0e0f52370', 'alias': 'transport', 'name': '交通運輸', 'description': '歡迎討論、分享國內外大眾交通運輸的相關資訊', 'subscriptionCount': 12047, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:35:33.573Z', 'updatedAt': '2020-09-18T15:58:10.905Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '#情報 #討論 #問題 #新聞 #分享 #問卷 #協尋 #紀實', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/13d77e12-056c-4295-875e-21784e9f6294/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0d42efa9-2ef5-4e90-85f3-da8fde08dff5/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 32}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': '4c6964fc-8b39-4480-a844-847f09e4e09d', 'alias': 'horoscopes', 'name': '星座', 'description': '星座版提供各種星座運勢、心理測驗、星座感情分享,或是有任何塔羅占卜相關的專業知識也可在此發文討論!', 'subscriptionCount': 347350, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:35:43.573Z', 'updatedAt': '2021-04-20T08:36:39.207Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['占卜', '心理測驗', '白羊', '金牛', '雙子', '巨蟹', '獅子', '處女', '天秤', '天蠍', '射手', '摩羯', '水瓶', '雙魚'], 'topics': ['心理測驗', '占卜', '雙魚', '射手', '天蠍', '雙子', '巨蟹', '白羊', '金牛', '水瓶', '獅子', '處女', '天秤', '摩羯'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8e7c9abc-8d16-4de1-80a0-9015e40af469/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5b2bc846-a9e1-4a60-b383-29cce5c40d18/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2362}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '50b54456-b7d2-455c-a5b3-a883bba6f1a2', 'alias': 'tarot', 'name': '塔羅', 'description': '進入本版之前請先閱讀板規,以免被刪文😇\\n\\n歡迎分享各式占卜及心理測驗類型的文章。\\n因為目前無占卜板,如果有類似卜卦、心理測驗等文章,將不會刪文', 'subscriptionCount': 103941, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:35:53.573Z', 'updatedAt': '2021-04-20T08:36:42.193Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': 
'', 'postTitlePlaceholder': '要記得分類歐!!!', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['占卜', '感情', '塔羅測驗'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b6b260c2-6edb-4d3b-82cc-064c671d886b/orig.jpeg', 'type': 'image/jpeg', 'width': 1181, 'height': 393}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3c7d83da-6222-40ad-87aa-e8c47b06e0b0/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 727}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '37a78696-f6a6-48ec-a19f-688f799a2aed', 'alias': 'marvel', 'name': '靈異', 'description': '靈異板提供分享關於自己或親友的驚奇、驚悚、超自然怪事經驗或自創靈異小說分享。', 'subscriptionCount': 145333, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:36:03.573Z', 'updatedAt': '2021-04-20T08:36:39.486Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['親身經歷', '海龜湯', '連載'], 'topics': ['都市傳說', '廢墟探險', '佛牌', '護身符', '親身經歷', '海龜湯', '真實故事', '靈異板系列文'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/106edc7e-f102-450e-963e-8ab044c0ecc2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/de2de1d8-9212-4bab-a075-0102b2b00ce8/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 727}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'de977130-f3eb-467b-822e-b0e91124e9eb', 'alias': 'food', 'name': '美食', 
'description': '美食板歡迎分享各種吃貨食記心得,或提供手搖飲料、校園美食、美食情報等文章!', 'subscriptionCount': 383179, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:36:13.573Z', 'updatedAt': '2021-04-20T08:36:35.879Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'large'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '食譜', '食記', '評比', '超商'], 'topics': ['台中美食', '高雄美食', '台南美食', '台北美食', '新竹美食', '板橋美食', '全聯', '711', '嘉義美食', '全家', '花蓮美食', '屏東美食', '貓咪咖啡廳', '宜蘭美食', '桃園美食', '學餐', '肯德基', '超商', '全聯牛奶糖季', '甜點', '台中美食', '銅板美食', '燒烤', '外國學餐', '嘉義美食', '手搖飲', '早午餐', '居酒屋', '餐飲店員告訴你', '下午茶', '飛機餐', '火鍋', '自助餐', '咖啡廳', '調酒'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/314f0f8c-6ef8-43e1-a0b5-7c676cf022ff/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/24f78ac5-0501-4258-a8df-59265fdff614/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1818}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd7db42a8-bedb-4ea2-96a7-c9034f4dd89a', 'alias': 'cooking', 'name': '烹飪', 'description': '歡迎大家分享以下內容:\\n1. 自己的手做料理\\n2. 
料理問題提問\\n料理提問請具備足夠條件\\n讓大家能夠幫你解決\\n請勿伸手牌', 'subscriptionCount': 182357, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:36:23.573Z', 'updatedAt': '2021-04-20T08:36:38.750Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前請先瞭解版規規定喔', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['料理', '提問', '廚具', '烹飪', '食譜', '小資料理'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/85e30cf2-d9ec-48f6-8895-92d259b563c5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/791be1a0-d27d-45d9-81a8-e7a666588632/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 979}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'fc52ee20-8732-41df-9751-49a8427187d6', 'alias': 'boba', 'name': '手搖', 'description': '本板供大家分享及討論手搖飲料的情報、心得等等內容。', 'subscriptionCount': 88945, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:36:33.573Z', 'updatedAt': '2021-06-18T17:12:12.488Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['鮮奶茶', '奶茶', '烏龍茶', '珍珠奶茶', '手搖飲', '螞蟻人', '珍珠控'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/f23885d5-3b00-4c02-8858-3547996e18ae/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/45f44600-bfec-48d2-ad2d-e7140eaeebfd/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 122}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '23082c2f-c19d-456c-a805-1e6ac0a36c31', 'alias': 'alcohol', 'name': '品酒', 'description': '不知道如何品酒的話,就先從喝慢一點開始吧!\\n\\n細細的品嚐,不管是調酒、威士忌、葡萄酒還是啤酒,除了感官的感受,歷史的背景與故事都十分玩味,有時候它不只能帶給你五感的刺激,甚至能反照出那無常的內心情緒。\\n\\n讓我們一起沉浸在酒杯中的美好,分享彼此有趣的微醺故事。\\n\\n未成年者請勿飲酒,飲酒過量有害健康', 'subscriptionCount': 67199, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:36:43.573Z', 'updatedAt': '2021-04-29T15:33:17.221Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['調酒', '酒吧', '威士忌', '紅酒', '品酒', '酒'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f7c21c53-ff98-42aa-b16e-aaa6189416f2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0753a2ae-2c35-443c-8094-2bbecc4efb02/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 49}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6120536f-6a70-49bb-8891-ec0ba2f4b396', 'alias': 'coffee', 'name': '咖啡', 'description': '義式、拿鐵、美式、黑咖啡...,不管哪種咖啡,只要你喜歡,他就是一杯好咖啡。\\n讓我們一起來,分享每一杯咖啡的美好、感動與故事,並找到最愛的,那杯專屬於你的咖啡。', 'subscriptionCount': 46087, 'subscribed': False, 'read': False, 'createdAt': 
'2020-02-04T07:36:53.573Z', 'updatedAt': '2021-04-20T08:36:50.654Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['CoffeeReview', '咖啡', '拿鐵', '摩卡', '卡布奇諾', '美式', '黑咖啡', '手沖', '賽風壺', '義式咖啡', '濃縮咖啡', '單品', '濾杯', '磨豆機', '豆子'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4d21cc6d-d809-4782-ab66-6ebd67ca8082/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/20ce47af-5f76-4eca-b57c-652b8dfcf61b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 33}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4dafb16a-98f6-4f6b-9daf-204fc4089edc', 'alias': 'go_vege', 'name': '呷菜', 'description': '歡迎大家交流呷菜資訊,國內外食記、新聞、素食產品、食譜、活動分享…等', 'subscriptionCount': 21406, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:37:03.573Z', 'updatedAt': '2021-04-20T08:36:54.456Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['素食', '美食', '旅遊', '愛動物', '環保', '極簡', '無痕生活'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/812764be-d085-4c77-8639-5f062f3b23d9/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2469f46f-2cc7-4cf5-9772-72dd69466628/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 23}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'de71b09b-b66a-40ca-baa7-759d5b10d49a', 'alias': 'fastfood', 'name': '速食', 'description': '歡迎各位速食愛好者來到本板!\\n無論是令人驚豔的新品,或是超級殺的優惠券,抑或是各種有關速食的疑難雜症,\\n都歡迎各位一起來討論📢📢📢', 'subscriptionCount': 43010, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:37:13.573Z', 'updatedAt': '2021-04-20T08:36:43.389Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文不必再自己打分類囉!\\n(請在上方選擇 標題分類)', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['麥當勞', '摩斯', '肯德基', '優惠券'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0a76ae91-80ff-42ea-a760-75b82f6b0f34/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fe822c3a-46d4-4c3b-90ab-5a30eebaf234/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '70c4b5d1-055e-48a7-b874-2c9dace6e4d6', 'alias': 'snack', 'name': '零食', 'description': '喜歡吃餅乾跟糖果的大家,一起分享好吃的零食吧\\n往幸福的路上肥下去~~~~~~~', 'subscriptionCount': 61260, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:37:23.573Z', 'updatedAt': '2021-07-08T09:17:55.285Z', 'canPost': False, 'ignorePost': False, 'invisible': 
False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['巧克力', '零食'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/aac7ecf2-ec7c-46ce-a639-5adec3a54214/orig.jpeg', 'type': 'image/jpeg', 'width': 845, 'height': 281}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/854e2bc4-0c88-4e3b-be78-d14786127b9e/orig.jpeg', 'type': 'image/jpeg', 'width': 187, 'height': 187}, 'postCount': {'last30Days': 62}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ffc2ebb1-9985-4ce8-8f30-7dfdca609afa', 'alias': 'cvs', 'name': '超商', 'description': '美食 福利 分享 刷卡回饋 都在超商版', 'subscriptionCount': 57831, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:37:33.573Z', 'updatedAt': '2021-07-16T12:11:55.731Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '⬆️發文記得點分類⬆️ 不然Ban90天', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['超商版重要通知', '小七', '全家', '思樂冰', '霜淇淋', '711'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/94e5c38c-d86d-439d-8d45-6726b018a12f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0d516394-42b2-421c-8fae-61ce8eed48f0/orig.jpeg', 'type': 'image/jpeg', 'width': 
200, 'height': 200}, 'postCount': {'last30Days': 109}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e271b323-0e08-4d9f-b379-dfbd82cfa32b', 'alias': 'hypermarket', 'name': '量販店', 'description': '歡迎分享各大量販店(全聯、costco、家樂福、愛買、大潤發⋯)最新優惠情報、新品上市或商品開箱分享!', 'subscriptionCount': 28565, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:37:43.573Z', 'updatedAt': '2020-10-27T01:42:22.648Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/af919d5f-301a-4eaf-bd9e-9dc0ff05af44/orig.jpeg', 'type': 'image/jpeg', 'width': 936, 'height': 312}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/dd7a3608-b8d3-45f9-9772-2bcc1f4092e1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a255fb97-4199-4bd0-9b28-1ec2c0d2f382', 'alias': 'travel', 'name': '旅遊', 'description': '旅遊板歡迎分享你的旅行紀錄或是國內外自由行、背包客心得、打工度假、機票購買等經驗,或是有什麼國內外不可錯過的旅遊景點也歡迎發文分享哦!', 'subscriptionCount': 177008, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:37:53.573Z', 'updatedAt': '2021-04-20T08:36:39.419Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'large'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 
'subcategories': ['精選', '臺灣', '日韓', '亞洲', '歐美'], 'topics': ['畢旅', '自由行', '賞楓', '海外志工', '台灣秘境', '臥鋪火車', '獨旅', '飛機餐', '沙發衝浪', '韓國', '穿旗袍遊台南', '首爾', '自由行', '展覽', '畢旅', '泰國浴', '泰國', '環島', '景點', '郵輪', '網卡', '香港自由行', '地鐵', '遊記', '大阪', '青年旅舍', '旅行社', '跟團', '行李箱', '租車', '機票', '護照', '簽證', '打工度假', '廉航', '打工換宿'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0287c747-1a5c-4029-9333-7c60150f6eed/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e41ed51a-ecf9-4d39-9259-4bcc0dbc8465/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 243}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '275e0c11-82d0-43df-9606-502f5727716d', 'alias': 'movie', 'name': '電影', 'description': '注意:本板嚴禁標題爆雷,內文如有爆雷內容\\n1. 請於標題最前面加上 #有雷\\n2. 請在內文最前面做好防雷措施,避免預覽文字出現劇透\\n違規者將刪除文章。\\n本板提供電影影評或上映情報之相關分享,或國內外影展、電影獎項、推薦片單等話題討論。', 'subscriptionCount': 180189, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:38:03.573Z', 'updatedAt': '2021-04-20T08:36:36.924Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '請記得話題加入「電影名稱」或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '情報', '電影', '臺灣', '韓國', '歐美', '日本', '中國'], 'topics': ['影評', 'MARVEL系列', '迪士尼', 'DC系列', '觀後感', '電影院', '奧斯卡獎', '預告片', '動畫', '真人版', '宮崎駿', '韓國電影', '日本電影', '歐美電影', '經典電影', '印度電影', '驚悚片', '紀錄片'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/5100ace3-10e5-40c8-bc68-727ff887daf6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/67f1560a-56c0-4f2b-b9ec-47a2dcc9e0cd/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 284}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ba0145a2-adff-4229-9a84-9b26ebd2f7f0', 'alias': 'tvepisode', 'name': '戲劇綜藝', 'description': '戲劇綜藝板提供分享戲劇節目、電視綜藝、八點擋等心得,最新國內外熱門綜藝推薦也可在此發文討論!', 'subscriptionCount': 132357, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:38:13.573Z', 'updatedAt': '2021-04-20T08:36:40.120Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '請記得在話題加入「戲劇名稱」或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['美劇', '韓劇', '日劇', '台劇', '陸劇'], 'topics': ['netflix', '木曜4超玩', '綜藝玩很大', '八點檔', '台劇', '後宮甄嬛傳', '植劇場', '日劇', '韓劇', '美劇', '陸劇', '泰劇', '韓綜', '日綜', '台綜', '陸綜', '偶像劇', 'RunningMan'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/643a503c-cbc0-460c-942f-faff3fca121b/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/160cdad7-b98c-45c0-8fbf-2d0f87a6d6cc/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 819}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '28e79ba0-8ded-4acc-a6b2-f015bdef7605', 'alias': 'ea_series', 'name': '歐美影集', 'description': '歡迎討論一點點電影跟歐美各國的影集呦!挪威北歐也可!只是板主也會好奇能去哪裡看XD 板主看到特別且有趣的文章會置頂讓更多人看到~\\n\\n溫馨提醒:根據Dcard版規,禁止揪團辦帳號、交換ID等交換個人資料行為', 
'subscriptionCount': 90562, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:38:23.573Z', 'updatedAt': '2021-06-03T08:11:36.935Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '不要在標題爆雷,會爆雷可以加#爆雷', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['歐美影集', 'Netflix', '心得分享'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8b1a55fc-afab-4d4c-9057-7c2f4cf89aa6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/14da7f97-0f30-4922-837f-7603feba2533/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 55}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0a859162-88d8-4406-8111-71c57aae84a1', 'alias': 'netflix', 'name': 'Netflix', 'description': '希望大家能一起創造友善小天地\\n分享我們對於Netflix 的熱愛\\nENJOY❤️\\n**請不要在這裡揪團買帳號,不要交換資料,會禁言**', 'subscriptionCount': 256225, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:38:33.573Z', 'updatedAt': '2021-05-02T01:11:11.254Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['Netflix', '影集', '美劇', '電影', '推薦', '觀後感'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/9d953264-c016-49a1-95a3-24c9342cb6ad/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e8ca3a5d-612f-45b7-8cff-55fc58167911/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 550}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4071b933-4198-49b2-93c6-dfce2f0dc272', 'alias': 'marvel_studios', 'name': '漫威', 'description': '歡迎發表與漫威有關的一切文章,包含漫畫、電影及活動等等。', 'subscriptionCount': 29901, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:38:43.573Z', 'updatedAt': '2020-09-14T06:42:27.504Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d6a3b4e4-a622-4fd1-b50d-4906b4cdf817/orig.jpeg', 'type': 'image/jpeg', 'width': 1239, 'height': 413}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/607155c5-81f2-41e7-b545-2ddde286315e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 10}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '019a92bb-1305-4471-88dd-872c6ee60ce7', 'alias': 'japan_drama', 'name': '日劇', 'description': '歡迎來到日劇板討論與日劇相關的內容,包含分享日劇、演員、心得等等都可以討論唷!\\n除了遵守板規外,希望大家在發文、留言時,都能保持最基本的禮貌,讓日劇板成為一個能讓大家友善討論的空間✨', 'subscriptionCount': 57326, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:38:53.573Z', 'updatedAt': '2021-04-20T08:36:43.563Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 
'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['日劇', '推薦', '分享', '心得'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/83cc7d39-41f0-4a2e-8b97-ddcaf1e93740/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8a3ac7f9-dcff-48eb-97dc-848e6806e459/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 62}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2e57a052-affa-4b32-a68d-98d7d6020c12', 'alias': 'korea_tvshow', 'name': '韓綜', 'description': '歡迎於本板討論韓國綜藝節目相關資訊與心得。\\n禁止空泛內容。\\n友善討論', 'subscriptionCount': 45965, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:39:03.573Z', 'updatedAt': '2021-06-30T07:52:54.118Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請善用分類標題 讓各位版友更清楚文章主題喲(。・ω・。)ノ', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['韓綜', '綜藝節目'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/23d6e94d-6ad0-41d6-bdaf-fced9db2eb20/orig.jpeg', 'type': 'image/jpeg', 'width': 890, 'height': 296}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bf544e9f-ec00-4357-9812-a056e9d4a046/full.jpeg', 'type': 'image/jpeg', 'width': 200, 
'height': 200}, 'postCount': {'last30Days': 21}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '885f988a-c7bf-4e24-a308-2560f8da8a0e', 'alias': 'music', 'name': '音樂', 'description': '音樂板歡迎分享國內外演唱會、專輯、新歌推薦、獨立音樂、音樂創作等文章,或國內外各種音樂獎項也可在此討論。', 'subscriptionCount': 82403, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:39:13.573Z', 'updatedAt': '2021-04-20T08:36:36.956Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '請記得在話題加入「音樂名稱」或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['精選', '創作', '東洋', '西洋', '樂器'], 'topics': ['金曲獎', '找歌', '音樂創作', '演唱會', '獨立音樂', '電子音樂', 'Cover', '中國新說唱', '空耳', '西洋音樂', '日文歌', '韓文歌', '古典音樂', '原聲帶', '饒舌', '吉他', '歌單', '歌詞', '五月天', 'KTV', '我是歌手', '唱片', '華語歌'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0aa38fd6-9151-4504-92c6-4084dbb6430c/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/021ca70f-ec98-4473-8b76-6d548c76a6bc/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 687}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '828d3c98-8b5c-4db9-afd8-d4c48c13dea4', 'alias': 'kkboxkma', 'name': 'KKBOX 風雲榜', 'description': '討論 KKBOX 風雲榜,就上 Dcard!', 'subscriptionCount': 4502, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:39:23.573Z', 'updatedAt': '2020-02-04T07:53:43.573Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 
'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/48cbeaa6-ab54-40e1-8cef-2a06c14c7278/orig.jpeg', 'type': 'image/jpeg', 'width': 1646, 'height': 548}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1bee1de3-4894-4905-9aa5-3831d19c77ca/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '97eb486d-2f9d-4912-8b5d-b8f539e449d4', 'alias': 'indiemusic', 'name': '獨立音樂', 'description': '討論與分享獨立、非主流音樂資訊\\n討論各種 的心得與資訊,包含但不限於:音樂祭、演唱會、表演等等', 'subscriptionCount': 46719, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:39:33.573Z', 'updatedAt': '2021-04-20T08:36:43.888Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '123 go', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['音樂祭', 'punk', 'hiphop', 'folk', 'blues', 'rock', 'funk', 'jazz', 'dreampop', 'citypop', '農農', '法蘭黛', '落日飛車', '懷孕飛車'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e515d597-a36c-48fb-803c-557eee37179b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/77cf293f-f31e-4d64-a091-7d51b42abfbf/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 50}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'802fb053-cd6b-48f7-a837-31b94e9e635c', 'alias': 'edm', 'name': '電子音樂', 'description': '希望大家踴躍發文、按讚、留言互動讓電子音樂版可以成為大家喜歡上來交流的地方,台灣的電子音樂市場仍然需要更多人來讓他茁壯,希望能讓更多人了解電子音樂的美好!', 'subscriptionCount': 18277, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:39:43.573Z', 'updatedAt': '2020-09-14T06:42:10.468Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4ed7fa88-6598-43e6-baf5-8437c849d7ca/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/549290c7-962c-4a4a-b3f2-e43c87a7074d/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 22}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3d5ea668-ec42-458c-91bb-12fa8e9afa27', 'alias': 'musicevent', 'name': '音樂活動', 'description': '討論各種音樂活動的心得與資訊,包含但不限於:音樂祭、演唱會、表演等等。愛惜帳號,發文時請按照格式!票務相關文章格式請看置頂!', 'subscriptionCount': 28045, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:39:53.573Z', 'updatedAt': '2021-07-12T04:47:48.783Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '愛惜帳號,發文時請按照格式!票務相關文章格式請看置頂!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['適合聽音樂的活動', '音樂祭'], 'nsfw': False, 
'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a332cc22-d3da-447a-b47b-34e4b898a447/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1fca37d7-f245-4d97-b305-2b981387e2f0/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 16}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '5b5691f1-d5ba-4d27-bcee-b921e0951ab0', 'alias': 'wind_band', 'name': '管樂', 'description': '歡迎分享任何管樂相關的活動,也可以於此分享、討論、各種的管樂知識、技巧、心得...等。', 'subscriptionCount': 6252, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:40:03.573Z', 'updatedAt': '2020-09-14T06:43:44.210Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/601f296e-1150-4ab4-9afe-065f915134e5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 599}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7d7f990e-312d-4c01-9c68-d9acf3cb001b/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 6}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd05d1ad3-7678-47a7-b498-9e108b74657e', 'alias': 'hiphop', 'name': '嘻哈', 'description': '歡迎大家討論嘻哈文化相關主題,包含但不限於饒舌、beatbox、街舞、塗鴉等內容。', 'subscriptionCount': 32804, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:40:13.573Z', 'updatedAt': '2020-09-14T06:37:59.268Z', 'canPost': False, 
'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6b71c8ef-3b4b-4cdd-aa83-ce4bf613ce0c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1ed548df-854b-4ef9-b5e0-5bb863b95b34/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 177}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'bda36c42-7c73-49de-999d-8c5540c70e01', 'alias': 'vintage', 'name': '復古', 'description': '發文標題前面請分類,好讓大家更容易找到文章內容\\n如:#知識分享 #歷史分享 #閒聊 #討論 #活動 #心得..etc', 'subscriptionCount': 20992, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:40:23.573Z', 'updatedAt': '2020-09-14T06:37:49.739Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/06a7981f-b918-44f0-8143-4c062c209df8/orig.jpeg', 'type': 'image/jpeg', 'width': 611, 'height': 203}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8bafd36d-57e6-4ce8-9cea-9661eb187453/orig.jpeg', 'type': 'image/jpeg', 
'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '977976c6-d4b7-4163-83d5-010c32ed46cd', 'alias': 'palmar_drama', 'name': '布袋戲', 'description': '霹靂兵烽決之碧血玄黃,金光御九界之仙古狂濤。\\n碧血玄黃變,法滅萬籟悲。\\n廓主宰千秋劫,天火焚盡百業,步輪迴。\\n看聖行無悔,付神道不歸。\\n菩提問世渡魔隳,玄解兵鋒戡亂,譜傳說。', 'subscriptionCount': 3627, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:40:33.573Z', 'updatedAt': '2021-06-02T02:04:18.609Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '#分類', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['布袋戲'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/dad37e3c-b180-4b8b-bb54-4bb2294b8b5c/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 359}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/22145b3b-98a6-411e-89ce-a7144d8389f5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 15}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8a2a5245-984b-4757-9c08-eee91a539153', 'alias': 'theater', 'name': '劇場', 'description': '劇場是大家的,不管音樂舞蹈戲劇傳統藝術,讓我們重新找回臨場感的價值,發揚表演藝術,感動更多人。', 'subscriptionCount': 9638, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:40:43.573Z', 'updatedAt': '2020-09-14T06:43:47.581Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': 
{'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b6092373-29a0-45c8-9e6f-8eef505005df/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/372d99c3-cbba-461c-bcf9-302833576aa2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '72f262c4-75aa-4d75-ab0e-cd7bd09e69b5', 'alias': 'game', 'name': '遊戲', 'description': '本板提供討論各種手遊、桌遊、家用主機、Steam等遊戲的祕技攻略或電玩周邊、電競實況分享!', 'subscriptionCount': 151262, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:40:53.573Z', 'updatedAt': '2021-04-20T08:36:36.223Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '請記得在話題加入「遊戲名稱」或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['LOL', 'OW', '手遊'], 'topics': ['動物森友會', '戀與製作人', '閃耀暖暖', '跑跑薑餅人', '極速領域', '傳說對決', '第五人格', '遇見逆水寒', '遊戲序號', '爆爆王M', '新楓之谷', 'FGO', '還願', 'Steam', 'Switch', '手遊', 'Overwatch', '攻略', '小遊戲', '課金', '單機遊戲', 'PS4', 'LOL', '魔物獵人', '電競', '遊戲實況', '桌遊'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ac3e2244-bfe7-440f-8934-f4f3a76e040b/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/dca89fb7-8735-4588-9d37-52e9e975467d/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 
1174}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '938bcf31-3da6-4b3c-83b0-78cf9ad6dbbe', 'alias': 'board_game', 'name': '桌遊', 'description': '發文請依照格式\\n⚠️標題前面請分類,包含但不限於此五類:#閒聊、#介紹、#開箱、#請益、#推廣', 'subscriptionCount': 19844, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:41:03.573Z', 'updatedAt': '2021-07-12T04:47:32.027Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/da41495b-36bc-4614-9870-f9e22ba55bac/orig.jpeg', 'type': 'image/jpeg', 'width': 1547, 'height': 515}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6ae185aa-b5ad-423f-a692-dabe6600e90c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 23}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'b903fa13-3258-4944-9afb-2b81c3708124', 'alias': 'hearthstone', 'name': '爐石戰記', 'description': '歡迎爐石戰記的朋友們踴躍在這個板發文討論,\\n無論是任何與爐石相關,開心的、不順的事,或是有心得想分享交流,都能於此板一同交流!', 'subscriptionCount': 3692, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:41:13.573Z', 'updatedAt': '2021-07-12T04:49:59.800Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 
'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'video', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7af6fe1a-3114-402b-a043-73a0366310e3/orig.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/04775c8b-ea8a-4b3c-942d-156395948dd2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '2b8a1428-77ee-4993-aaa1-b31812103183', 'alias': 'onmyoji', 'name': '陰陽師', 'description': '有問題都可提出', 'subscriptionCount': 4848, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:41:23.573Z', 'updatedAt': '2020-09-27T17:35:58.484Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a55116a2-fe6e-49e7-ac06-f8b3a66e2c28/orig.jpeg', 'type': 'image/jpeg', 'width': 719, 'height': 239}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e1ae1343-1705-41f0-add9-2106c93ce3a0/orig.jpeg', 'type': 'image/jpeg', 'width': 120, 'height': 120}, 'postCount': {'last30Days': 12}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cc03b5eb-8bb2-4072-aea0-4837e094c317', 'alias': 'minecraft', 'name': 'Minecraft', 'description': '這裡是麥塊版', 'subscriptionCount': 13111, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:41:33.573Z', 'updatedAt': '2021-07-12T04:48:19.935Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請勿直接在板上留下通訊軟體資訊', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/754f41ec-a23a-4af8-bc7c-461b67fcda65/orig.jpeg', 'type': 'image/jpeg', 'width': 1600, 'height': 533}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b379fc30-a224-41bf-811d-1dce8eff50c5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 92}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '1485d642-95e2-4984-92c6-bf2e69ede925', 'alias': 'lol', 'name': '英雄聯盟', 'description': '本板為英雄聯盟板,討論主旨為一切與英雄聯盟有關之事物與話題。', 'subscriptionCount': 40278, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:41:43.573Z', 'updatedAt': '2021-07-12T04:48:23.684Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'video', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3457801f-3cea-4a65-a542-5693bd03cef3/orig.jpeg', 'type': 'image/jpeg', 'width': 1280, 'height': 427}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/17230066-f572-4d8d-b1f1-19f73fa58b11/orig.jpeg', 'type': 'image/jpeg', 
'width': 200, 'height': 200}, 'postCount': {'last30Days': 585}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '14185ae6-44ab-45e2-88da-ae97e0159c9f', 'alias': 'acg', 'name': '動漫', 'description': '動漫板提供各種輕小說、動畫討論、新番推薦、公仔模型、同人二創或Cosplay分享,動漫周邊或動漫展情報也歡迎在此發文討論。', 'subscriptionCount': 161557, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:41:53.573Z', 'updatedAt': '2021-04-20T08:36:39.864Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '請記得在話題加入「作品名稱」或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['精選', '情報', '心得', '推坑', '同人', 'COS'], 'topics': ['蠟筆小新', '庫洛魔法使', '聲之形', '動漫展', '初音未來', 'Cosplay', '動漫周邊', '動漫歌單', '推坑', '新番', '聲優', '名偵探柯南', '漫畫家', '動畫化', '恐怖漫畫', 'Vocaloid', '童年回憶', '真人版', '輕小說', 'MAD', '聖地巡禮', '同人'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/79da68ac-0a0a-477b-b677-f00fba9adffd/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4019a9d5-4b92-4ebd-a3fa-925eed513953/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1266}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e6b363d9-9ec0-450c-bdcc-7c0c30eb1084', 'alias': 'boyslove', 'name': 'BL', 'description': '本板提供討論、分享、詢問BL相關話題,歡迎分享各種屬性的糧推坑,也歡迎發表自創、二創小說或插圖哦!', 'subscriptionCount': 32078, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:42:03.573Z', 'updatedAt': '2021-04-20T08:36:46.576Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': 
True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '請記得在話題加入「作品名稱」或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['BL原創', 'CP', '創作', '越界', '廣播劇', 'BL推坑', '腐', '圖', '耽美', '虐', '攻受', '腐女', '腐男', '同人', '書單', 'BL漫畫', 'BL小說', '作者', '正太', '真人'], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6b13c26b-a5c9-47cc-a294-8955db8d6b7d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9ac875d1-775e-4719-867a-6267012f585d/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 236}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7b05043f-048a-467e-92b2-adf3f2054ca8', 'alias': 'girlslove', 'name': '百合', 'description': \"本板提供討論、分享、詢問百合(Girls' Love)相關話題,歡迎分享各種屬性的 CP 坑、動漫畫推薦,也歡迎發表自創、二創小說或插圖哦!\", 'subscriptionCount': 24755, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:42:13.573Z', 'updatedAt': '2021-04-20T08:36:47.871Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文請記得在下一頁加入其他相關分類話題喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['citrus', '百合漫畫', '百合小說'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9faaab73-7b84-451d-b076-9dfd11ff58df/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0639f5c7-65a5-43f5-9d17-8a7aeb45c074/full.jpeg', 'type': 'image/jpeg', 'width': 
200, 'height': 200}, 'postCount': {'last30Days': 63}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '06239f5b-6fbc-4159-bfa3-73defacf1c0a', 'alias': 'cosplay', 'name': 'Cosplay', 'description': '發文前務必先詳閱站規及版規\\n\\n希望大家能以友善包容的心互相交流~', 'subscriptionCount': 14071, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:42:23.573Z', 'updatedAt': '2020-09-24T09:55:59.351Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5135cef3-ec61-43bd-bbcf-22830f821a8e/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c0e6788f-81ed-4c44-acfa-1f3076f839b0/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '99049f5e-bb3d-420f-b708-52c384359952', 'alias': 'pokemon', 'name': '寶可夢', 'description': '寶可夢板提供玩家們討論攻略、抓寶心得或情報分享及詢問 Pokémon 之相關話題!', 'subscriptionCount': 33618, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:42:33.573Z', 'updatedAt': '2021-04-20T08:36:46.749Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選'], 'topics': 
['攻略', '道館', '色違', 'IV', '團戰', '星塵', '雷達', '進化', '快龍', '皮卡丘', '卡比獸', '神奇寶貝球'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9c9ac778-b194-4179-87c0-651b37bc6109/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/96f83382-c6c5-472b-94d8-98cc9588347e/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 389}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '57288c27-f141-4270-8b56-a1c7a1336f54', 'alias': 'disney', 'name': '迪士尼', 'description': '歡迎喜歡迪士尼的卡友來分享情報!', 'subscriptionCount': 28940, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:42:43.573Z', 'updatedAt': '2020-09-14T06:39:20.773Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a525a9da-8f8f-44c6-a596-ce829c3854e6/orig.jpeg', 'type': 'image/jpeg', 'width': 553, 'height': 184}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/182cabb3-d1e9-436b-8a4a-acd16db15f2c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 28}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '097fb2e9-a399-4b10-829c-53b988d189cc', 'alias': 'conan', 'name': '柯南', 'description': '這裡是讓大家盡情聊柯南相關話題的小天地!!希望大家都能在這裡找到同好(*´∀`)~♥', 'subscriptionCount': 21873, 'subscribed': False, 'read': False, 
'createdAt': '2020-02-04T07:42:53.573Z', 'updatedAt': '2020-09-14T06:39:48.960Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a82da879-3b8a-49a5-9f40-5b9fdfc4bb43/orig.jpeg', 'type': 'image/jpeg', 'width': 800, 'height': 266}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/da8d355a-31dc-4181-b1c8-3dd9ac00afa4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 29}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e3514197-0703-46dc-b2e5-7f5a728bcb4a', 'alias': 'onepiece', 'name': '航海王', 'description': '隨著和之國篇章戰況的白熱化,未來肯定有更多資訊,為了大家能夠在看到爆炸性資訊卻無人可分享的情況下有個良好的平臺可以使用,故在此設置航海王版', 'subscriptionCount': 21385, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:43:03.573Z', 'updatedAt': '2020-09-14T06:44:25.197Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/13fbcc49-613a-4a93-a136-efcfc233b46e/orig.jpeg', 'type': 'image/jpeg', 'width': 1376, 'height': 459}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/b973da92-09c4-4533-898d-9b9f3a219caa/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 10}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8088e42e-1dc9-4ff7-bcc2-ba8478685157', 'alias': 'kanahei', 'name': '卡娜赫拉', 'description': '每個板的板規不一樣,請麻煩詳閱板規後再發文,感謝配合。\\n\\n卡娜赫拉教的宗旨就是買爆周邊,買到你吃土也沒關係,這是正常現象。\\n\\n歡迎來到卡娜赫拉板,麻煩發文時請在標題前面加上#(文章類型)\\n\\n#心得文 活動遊記、遊戲(也可分類攻略類) 、吃的、保養品等等,都可以歸類在這邊\\n\\n#情報分享 例如新的周邊出來,或是有什麼新的展場 ex.15週年特展、日本最新資訊等\\n\\n#收藏分享 你的大大小小周邊都可以拿出來跟大家炫耀或是分享的文章類型,切記,嚴禁盜版!\\n\\n#圖片分享 老師的tweet或是IG更新圖片,或是自己的繪圖,都可以歸類在這!\\n\\n#新貼圖 顧名思義🤣\\n\\n#請益(問) 買不到什麼周邊?請問這哪裡有在賣?都可以用這個類型!\\n\\n#生活分享 搞笑、瑣碎、趣事等等的事情,都可以使用這個類型\\n\\n最後提醒發文前請務必詳閱板規,謝謝!', 'subscriptionCount': 14780, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:43:13.573Z', 'updatedAt': '2021-04-20T08:36:38.374Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '標題格式 : #(文章類型)xxx', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['卡娜赫拉', 'Kanahei', '卡娜赫拉老師', '卡娜赫拉板'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6a1fd590-2db9-4ceb-a2d9-91aeeb429549/orig.jpeg', 'type': 'image/jpeg', 'width': 454, 'height': 151}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/618264bf-0244-46b3-9966-712644b80cc2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 25}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'eb4592f3-bd75-44f0-839a-2da590a202fc', 'alias': 'shin_chan', 'name': '蠟筆小新', 'description': '歡迎各位小新粉多在板上分享、交流~~\\n\\n⛳️發文前,請詳閱板規\\n標題開頭請按照以下格式用# 
加以分類:\\n①#問 — 發文詢問任何有關蠟筆小新的問題[例如:問集數(名)、周邊商品資訊、圖片桌布等...]\\n②#影片 — 文章內分享有關小新的影片\\n③#圖片 — 文章內分享有關小新的桌布、梗圖、手繪或影片截圖等...\\n④#討論 — 發文與其他板友討論有關蠟筆小新的劇情、人物或是周邊商品等內容\\n⑤#分享 — 文章內分享除了②影片、③圖片和⑥快閃店以外的其他內容(例如:周邊商品、海外小新商店或開箱等...)\\n⑥#快閃店 — 如有快閃店的資訊,標題請加#快閃店,讓板友能快速接獲資訊!', 'subscriptionCount': 40623, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:43:23.573Z', 'updatedAt': '2021-07-12T04:51:37.886Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '標題開頭處請按照板規加以分類⚠️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['小新', '小葵', '小白', '美冴', '廣志', '阿呆', '正男', '妮妮', '風間', '快閃店'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/98740740-49d8-4231-9177-0db1bba4f800/orig.jpeg', 'type': 'image/jpeg', 'width': 1362, 'height': 454}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/23064cad-e1a0-48fe-969d-894266fb4a3a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 137}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'f4388191-41f8-428d-af69-d07d4481dc9e', 'alias': 'sport', 'name': '運動', 'description': '本板提供討論各式運動技巧、運動器材的經驗分享,或是運動傷害的治療、預防也可在此發文討論。', 'subscriptionCount': 37363, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:43:33.573Z', 'updatedAt': '2021-04-20T08:36:48.089Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 
'ipCountryCondition': {}, 'subcategories': ['籃球', '棒球', '排球'], 'topics': ['馬拉松', '游泳', '滑板', '登山', '攀岩', '潛水', '慢跑', '運動內衣', '運動彩券', '物理治療', '運動傷害', '排球', '歐冠', 'FIFA', '球拍', '啦啦隊', '體操', '奧運', '冬季奧運'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/53fde3ea-4e53-4668-a058-64af758cd6df/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bffddcf8-80e2-4ec3-b1ea-15541f52b5d6/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 116}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd55a7e9f-95c0-41d3-8890-964e233e36a7', 'alias': 'baseball', 'name': '棒球', 'description': '供討論、分享、詢問與棒球相關的話題。或是各種國內外賽事(例:CPBL、MLB、日職等...)皆可在此討論!', 'subscriptionCount': 23398, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:43:43.573Z', 'updatedAt': '2021-04-20T08:36:55.559Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文請記得在下一步驟加入話題或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['中華職棒', '中信兄弟', 'Lamigo桃猿', 'CPBL', 'MLB', '陳偉殷', '日職', 'WBC', '統一獅', '富邦悍將'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/40400e3b-3fa9-47b1-aaf0-06f1a2211e1c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e14798d3-48fc-4e60-8e5d-8f4a13653583/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 84}, 'favorite': False, 
'enablePrivateMessage': False}, {'id': 'c332e370-8d22-4b07-80ce-f27a93872962', 'alias': 'cpbl', 'name': '中職', 'description': '', 'subscriptionCount': 20408, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:43:53.573Z', 'updatedAt': '2021-03-15T16:36:52.806Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6a84fb3e-7a22-4ec3-90dd-19920be38e21/orig.jpeg', 'type': 'image/jpeg', 'width': 756, 'height': 252}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3b2d0912-60a9-40fe-b3e2-55012df081e2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 72}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '79b1968a-3d30-43b9-b413-1977725cf7f0', 'alias': 'tennis', 'name': '網球', 'description': '供討論、分享、詢問與網球相關的話題。或是各種國內外賽事(例:溫網、法網等...)皆可在此討論!', 'subscriptionCount': 6797, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:44:03.573Z', 'updatedAt': '2021-04-20T08:36:54.364Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文請記得在下一步驟加入話題或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['溫網'], 'topics': ['溫網', '澳網', '法網', '美網', '拉沃盃'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': 
['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fc9ccbc2-2762-45d1-92fa-d52cb15b967d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/df96afef-6c3c-493a-8bfb-f49707cdafe0/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 39}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2ec9978e-181c-4e14-8ada-18533d1e3d45', 'alias': 'soccer', 'name': '足球', 'description': '供討論、分享、詢問與足球相關的話題。或是各種國內外賽事(例:世足賽、英超、歐冠等...)皆可在此討論!', 'subscriptionCount': 9996, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:44:13.573Z', 'updatedAt': '2021-04-20T08:36:58.771Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文請記得在下一步驟加入話題或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['新手球迷指南', '英超', '歐冠', '球隊介紹', '轉會市場', '德甲', '西甲', '運動彩券', '法國隊', '賽事分析', 'Mbappe', '英格蘭隊', '梅西', '孫興慜', '阿根廷隊', 'C羅', '冰島', 'FIFA', '比利時隊', '西葡大戰', '世足16強賽', '內馬爾', '巴西隊', '烏龍球', '烏拉圭隊', '德國隊', '日本隊'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ed503adf-fff4-4ca4-86a1-ed38bbea8cf8/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bda447b4-054c-46ca-8490-5a584210b53c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 121}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '863b960a-72a8-4479-80a2-b976275c002e', 'alias': 'basketball', 'name': '籃球', 'description': '供討論、分享、詢問與籃球相關的話題。或是各種國內外賽事(例:NBA、SBL、HBL、瓊斯盃等...)皆可在此討論!', 'subscriptionCount': 54756, 
'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:44:23.573Z', 'updatedAt': '2021-04-20T08:36:38.056Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文請記得在下一步驟加入「相關話題」或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['NBA', 'UBA', 'HBL', '林書豪', '瓊斯盃', 'JR', 'SBL', 'LBJ', '寶島夢想家'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/992d2e61-310c-4520-aa34-54a357b5bb0b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a5666150-9277-4fb5-be6b-72a600c8a09e/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 390}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4ccb34a2-4d78-4ec0-8287-b6006c9087f6', 'alias': 'badminton', 'name': '羽球', 'description': '歡迎大家討論有關羽球的資訊、各種硬體上的使用心得,還有學習羽球的歷程。', 'subscriptionCount': 15102, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:44:33.573Z', 'updatedAt': '2020-09-14T06:40:12.019Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前,先選擇⬆️標題分類', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/82965f29-741c-41d9-8248-8c11eb82bdf9/orig.jpeg', 
'type': 'image/jpeg', 'width': 960, 'height': 320}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0bc0cf0f-2379-4c3f-8e8d-1c59d2f20445/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1f909292-e459-4f31-97e9-f25f0d7f7d77', 'alias': 'volleyball', 'name': '排球', 'description': '一起打球吧!', 'subscriptionCount': 20092, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:44:43.573Z', 'updatedAt': '2020-09-14T06:43:55.460Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/96617799-964f-4f62-82b2-cd1d6f606c5a/orig.jpeg', 'type': 'image/jpeg', 'width': 640, 'height': 213}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/74bc3547-65b7-4387-acd8-6554210d6217/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 34}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '78751e16-7e34-46cd-a121-a1c1f1233f8d', 'alias': 'table_tennis', 'name': '桌球', 'description': '歡迎各位喜歡桌球、愛好桌球的朋友來這裡和大家交流聚聚喔~\\n舉凡桌球場地、桌球用品、技術分享、球員資訊都可以在這裡和大家分享討論,一起進入桌球的小世界吧!', 'subscriptionCount': 5404, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:44:53.573Z', 'updatedAt': '2020-10-24T14:26:34.666Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 
'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6ceb3951-999f-4c65-b6c2-263ce3a6cb04/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/20b1c460-63d8-4484-8c32-ec536e5833f2/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b28f2404-4d9b-4944-8fe0-6d9eef5fdec1', 'alias': 'dance', 'name': '舞蹈', 'description': '歡迎各位交流舞蹈文化相關主題,包含舞蹈心得、舞蹈活動和舞蹈問題。', 'subscriptionCount': 19712, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:45:03.573Z', 'updatedAt': '2021-07-12T04:47:39.757Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['公告'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f6c1596d-d60d-4058-b737-5ed865113333/orig.jpeg', 'type': 'image/jpeg', 'width': 899, 'height': 300}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/60cb8446-1b3a-4a1f-a179-7b0b3744806b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 30}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '9b5213d6-4427-4a3c-b788-efa19493631c', 'alias': 'fitness', 'name': '健身', 
'description': '請看版規!!!看完歡迎在此發表健身相關話題,例如:重訓技巧、健身飲食、健身房評比、體脂控制等經驗分享。', 'subscriptionCount': 227955, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:45:13.573Z', 'updatedAt': '2021-04-20T08:36:39.028Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '發文前看版規,發轉讓會籍、教練課相關貼文會永久禁言哦', 'ipCountryCondition': {}, 'subcategories': ['精選'], 'topics': ['生酮飲食', '減脂', '乳清', '增肌', '健身器材', '健身房', '重訓', '臥推', '熱量', '啞鈴', '有氧', '深蹲', '減重', '蛋白質', '伏地挺身', '胸肌', '健美', '腹肌', '肌肉'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/be60b966-28fb-4e0b-b870-53a2f05d1243/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9a3eb658-2908-4b63-8081-4c26c07139a5/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 369}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8a2a0b9a-7664-49b0-bff7-58389ef73a0d', 'alias': 'weight_loss', 'name': '減肥', 'description': '本板供大家討論減肥上的任何問題和困難,互相扶持。\\n減肥的路上常覺得很孤單、路很長,可以上來多多交流,大家共同成長,期待都能夠達到自己的目標💜', 'subscriptionCount': 221896, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:45:23.573Z', 'updatedAt': '2021-04-20T08:36:40.945Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['飲食', '運動', '勵志'], 'nsfw': 
False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f7247eb7-828d-40cd-a5f1-cd1de4c210c7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 599}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8bf5bed9-c587-46f5-b740-ddf03b2bfd43/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 627}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '826d2c1a-dc22-41fc-8963-5223339e7c00', 'alias': 'free_dive', 'name': '自由潛水', 'description': '歡迎各位小魚兒來到自由潛水看板!\\n此看板目的在給各位個平台分享自由潛水的各項資訊\\n從潛水考證資訊、分享練習技巧、揪團潛水練功、分享照片影片,讓更多人看見自由潛水的快樂與自由。\\n\\n揪團潛水辦法更新中!\\n會盡快推出全新辦法!敬請期待~\\n⚠️發文請注意版規規範,以免遭檢舉。\\n板規\\n*禁止發表與本板主旨無關的內容\\n*發表言論請相互尊重、互相欣賞、請注意發言、禁止筆戰,經版主認定有筆戰行為,版主或留言進行勸導,若再犯將刪除留言並禁言數天。\\n*Dcard 禁止交換個人資料(分享單一主題類連結除外)\\n*可以使用貼文或留言分享學習心得與經驗~(包含潛店、教練名稱)但請勿涉及商業行為如公佈價錢。(善用揪團辦法:版規第9條)\\n*相同活動禁止三天內重複貼文(請先爬文以免文章遭刪除)\\n*黑特特別規定:黑特請將圖片、店名、教練名全數打碼,「不得隱射、使用諧音」以免觸法,違者經勸導並限期改善,未依規定執行者文章將被刪文。\\n*請勿留言惡意抹黑、不實指控、罵髒話、使用諧音罵髒話、濫用檢舉功能。\\n*揪團辦法請用匿名信箱或賴社群,勿留個資。', 'subscriptionCount': 24152, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:45:33.573Z', 'updatedAt': '2021-06-21T10:16:07.923Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '板規', 'postTitlePlaceholder': '#放呆 #提問 #分享 #影片 #圖', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['自由潛水'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/36fa8f2e-dcc9-4852-9cd3-247108cbc91d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/bc03d06d-dc9c-4979-befa-81aadebf1e91/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 25}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd5753f93-72b7-4330-98d9-2535835e4527', 'alias': 'cycling', 'name': '單車', 'description': '歡迎分享各種單車相關事物、騎乘心得。\\n標題前面請分類:#資訊、#心得、#問題、#討論', 'subscriptionCount': 10222, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:45:43.573Z', 'updatedAt': '2020-09-09T23:57:14.913Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/107b8d3a-0a15-44d4-bfcc-1c583367a882/orig.jpeg', 'type': 'image/jpeg', 'width': 382, 'height': 128}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7750f26f-830c-4b20-ac2b-d183a1c6db7e/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 40}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'efdb5f14-6e4f-464f-88c8-6cce1f3bcc42', 'alias': 'sportsevents', 'name': '大型賽事', 'description': '本板提供國家選手參與國內外大型賽事討論,一起為國家代表隊加油!', 'subscriptionCount': 13160, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:45:53.573Z', 'updatedAt': '2021-04-20T08:36:53.343Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 
'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['世大運'], 'topics': ['亞運', '世大運'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0d455ca7-7eb6-43b0-ac67-ebc0076b0336/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e9db762e-d2f8-410b-8625-cca12de2da34/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6df2a719-120d-42ed-b02f-8ada90ff8c99', 'alias': 'disabled', 'name': '身心障礙', 'description': '歡迎您來到身心障礙專板!本板特為身心障礙人士所設,歡迎各障別之板友相互交流,互相照顧!\\n請各位發文時保持尊重及友善,請勿任意交換個資,及出現任何商業交易等可能觸法之行為,一同創造一個有愛無礙的看板環境!\\n(特別說明:本板無治療及問診功能,若自覺生理心理有急迫性的問題或病痛亟需解決,請立即前往醫療單位尋求協助。)', 'subscriptionCount': 6259, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:46:03.573Z', 'updatedAt': '2021-06-20T14:16:33.805Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文請保持友善並遵守板規規定。', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['身障', '輔具', '聽障', '心情', '視障', '肢障', '資訊', '憂鬱症', '精神疾病', '憂鬱', '心理'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5855eef5-91c0-4a08-830a-d87cb9f4adb9/orig.jpeg', 'type': 'image/jpeg', 'width': 626, 'height': 209}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b6c63b63-8739-4d54-937c-6a5a27eba3d1/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 26}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'9a62803d-a808-4a80-ad8c-1a3f2098c7bd', 'alias': 'boy', 'name': '男孩', 'description': '專屬男孩的討論版,提供和男生有關的話題討論。也能在這裡匿名分享、抒發、詢問遇到的困擾,或是即將登入國軍Online的你也能在這裡找到同梯好兄弟!(*禁止張貼裸露性器官之圖文*)', 'subscriptionCount': 41019, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:46:13.573Z', 'updatedAt': '2021-04-20T08:36:51.230Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['表特', '軍旅', '心事'], 'topics': ['男生保養', '兵變', '新訓', '男用香水', '當兵', '替代役', '學長幫幫我', '撩妹', '戒尻', '國軍', '表特', '入伍', '鬍子', '長高', '痘痘'], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f9e9ccc4-59c2-456b-b31b-56a1b65805bd/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9107cce1-b677-4288-b0b1-c6b55569c428/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 197}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e600760f-46c5-40a6-96b4-4b201770464a', 'alias': 'military', 'name': '軍旅', 'description': '洞!三!夭!又你!拖拖拉拉慢吞吞的,你過年啊?整個三連就等你一個。三連!\\n阿你不是三連的喔,舉手答有啊?莫名其妙。奇怪了~頭髮剪掉智商也剪掉了是不是?', 'subscriptionCount': 29995, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:46:23.573Z', 'updatedAt': '2021-04-20T08:36:41.218Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 
'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['洞八', '官校生', '國家國家', '親愛精誠'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9f07190c-6524-41f0-9e18-819a4e4381f5/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c16db6a0-8338-435a-9d9b-da69a7df4077/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 305}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0e0bc27b-8c69-4149-b229-04e7a865a09d', 'alias': '3c', 'name': '3C', 'description': '文章標題分類類別:\\n\\n💫討論類型\\n#問題\\n#請益\\n#閒聊\\n#問機\\n#菜單健檢\\n#問卷\\n\\n💫資訊分享\\n#開箱\\n#情報\\n#分享\\n#心得\\n#新聞', 'subscriptionCount': 103364, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:46:33.573Z', 'updatedAt': '2021-04-20T08:36:41.354Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '標題記得加入「分類標籤」喲!# # #', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '電腦', '手機', '音響', '教學'], 'topics': ['WhatsOnMyPhone', '機械式鍵盤', '藍牙耳機', '手機桌布', '手機維修', '電腦維修', '電腦', '手機殼', '筆電', '鍵盤', '滑鼠', '顯卡', '行動電源', '輸入法', 'HDMI', '音響', '處理器', 'Windows', 'MacBook', 'Android', 'iOS', 'iPhone', 'iCloud', 'Siri', 'NFC', '充電器', 'COMPUTEX', 'WWDC', '充電線', 'USB', '路由器', '主機', '網速', 'iPad'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/dba602da-9b99-4474-8c85-d55129dd0074/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/bd6cc5dc-d66d-415a-9734-9857db016d64/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2301}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '577f29b9-ef96-4e05-9e79-55e2a7d6e3db', 'alias': 'app', 'name': 'App', 'description': '歡迎來到APP 板,發文前請詳閱板規,\\n發文請於標題使用#做分類。\\n參考分類標題:\\n#分享 - #分享 2019年度好用軟體\\n#請益 - #請益 如何在Excel中插入甘特圖?\\n#討論 - #討論 簡報大家喜歡使用PowerPoint或是Keynote?\\n#求救 - #求救 電腦軟體閃退!!!', 'subscriptionCount': 50151, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:46:43.573Z', 'updatedAt': '2021-04-20T08:36:37.190Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文請從上方選擇標籤分類', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['分享', '教學'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c3004a85-f254-440f-9c26-19c003344b07/orig.jpeg', 'type': 'image/jpeg', 'width': 1248, 'height': 416}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/49635bfd-a02f-4b07-83bb-717da73e804a/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 280}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a1e20a52-c15e-42a2-980d-7401cfab4df7', 'alias': 'smart_home', 'name': '智慧家庭', 'description': '討論各式智慧家電,從小插座至連網微波爐,從單一設備的採購至整個家庭的自動化建構都可以討論及分享。', 'subscriptionCount': 17780, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:46:53.573Z', 'updatedAt': '2020-09-14T06:35:31.240Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 
'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '這個板是討論關於 “連網家電” 的專板呦,任何一般家電請改 Po 至居家生活板。', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/965d844b-575e-430a-99cc-3974ccfed564/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5f706af6-9921-455a-b902-f4ea4170b61b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '42bfdc63-8601-4e87-b53c-53d831caa2cf', 'alias': 'apple', 'name': 'Apple', 'description': '請務必看完版規再發文及討論\\n在此版發文及討論視同同意版規', 'subscriptionCount': 173547, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:47:03.573Z', 'updatedAt': '2021-04-20T08:36:40.086Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請務必看完版規再發文\\n發文視同同意版規', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['AppleLearn', 'AppleWork', 'Mac', 'iPad'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b65eca8f-45e4-4b3d-a236-4a2ca71374be/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9a259447-9344-41d7-8d99-c81ed7aed21a/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': 
{'last30Days': 1303}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ef97f735-6a13-4094-8288-d3a69c63aaba', 'alias': 'av_equipment', 'name': '視聽設備', 'description': '耳機🎧設備、音響設備、電視設備....等等視聽設備\\n都可以分享和討論問題哦', 'subscriptionCount': 11248, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:47:13.573Z', 'updatedAt': '2021-07-04T17:19:52.712Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文標題請依照分類格式,格式如下: #開箱 #分享 #心得 #情報 #新聞 #優惠 #討論 #閒聊 #請益 #問卷 #知識 #活動', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['藍芽耳機', '耳機線', '喇叭', '電視'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b9990243-742a-43ab-8739-263f1ee548e8/full.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f2635822-08c0-44a2-aa15-2b414f9cf602/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 57}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3bceb81c-574e-4a96-8050-4df56096d0df', 'alias': 'money', 'name': '理財', 'description': '理財板提供分享各種省錢小撇步、信用卡經驗、虛擬貨幣、股票投資心得等,歡迎你和大家交流各種不錯的理財方式哦~', 'subscriptionCount': 298184, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:47:23.573Z', 'updatedAt': '2021-04-20T08:36:40.750Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 
'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['請益', '虛擬貨幣', '基金', '股票期貨', '保險', '匯率'], 'topics': ['信用卡', '基金', '股票期貨', '虛擬貨幣', '匯率', '儲蓄險', '保險', '比特幣', '投資'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ec51eba2-1c07-4f9b-ac14-f46edb57a49c/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c1ece9f4-ffb2-4758-aac6-4169c9ebaebb/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 873}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'de2af8b3-8fd5-4864-9ac1-4351e579821a', 'alias': 'creditcard', 'name': '信用卡', 'description': '最新信用卡資訊分享、討論 🙌\\n⚠️ 發文前請閱讀板規 ⚠️\\n選擇分類後就不用在標題再打一次了哦!', 'subscriptionCount': 97534, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:47:33.573Z', 'updatedAt': '2021-06-24T13:09:49.998Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': True, 'hasPostCategories': True, 'titlePlaceholder': '發文標題請分類:#詢問、#討論、#心得、#情報、#優惠', 'postTitlePlaceholder': '[標題]請記得選分類哦', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['核卡', '現金回饋', '哩程', 'VISA', 'JCB'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1b2926fb-99df-426d-9435-047f7a0af8d7/orig.jpeg', 'type': 'image/jpeg', 'width': 961, 'height': 320}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5fa76433-5cba-4475-b891-74ab7de4ce17/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 252}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'817d71bb-ebdf-4326-b8aa-10df4fcdf03a', 'alias': 'savemoney', 'name': '省錢', 'description': '歡迎大家交流各種優惠訊息與省錢方法討論。\\n發文前,記得把標題分類唷!', 'subscriptionCount': 186877, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:47:43.573Z', 'updatedAt': '2021-07-07T06:40:02.753Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前,先選擇↑標題分類', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['優惠', '已兌換', '買一送一', '生日優惠', '折價券', '折扣碼'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a5d488c2-9b05-4b1f-8348-221be4b2ef73/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f9adcb6a-ca82-4741-b457-7ab2118aa89f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 955}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'c6fcf4d2-144b-40f7-b6f5-da4452e1616d', 'alias': 'mobile_payment', 'name': '行動支付', 'description': '歡迎大家來行動支付板,希望大家可以善用此板資源,讓自己的生活更便利', 'subscriptionCount': 41293, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:47:53.573Z', 'updatedAt': '2021-07-12T04:51:40.834Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '⚠️記得選分類,選擇正確的分類⚠️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['優惠', '行動支付', '閒聊', '分享'], 'nsfw': False, 'mediaThreshold': {}, 
'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/222011a5-ca21-41cc-9ca0-e17c26d59ee1/orig.jpeg', 'type': 'image/jpeg', 'width': 1646, 'height': 548}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/afd2aacc-9b7b-4b42-a119-42d12c2f6b9b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'e91644fb-5a42-42e0-9526-61b1e133559d', 'alias': 'blockchain', 'name': '區塊鏈', 'description': '歡迎大家討論鏈圈、礦圈、幣圈的各種問題。', 'subscriptionCount': 31274, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:48:03.573Z', 'updatedAt': '2021-04-20T08:36:47.042Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前記得選一下分類看看板規喔!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['區塊鏈', '加密貨幣'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0b8beacf-51cb-49f9-9940-bbd2e1b98120/orig.jpeg', 'type': 'image/jpeg', 'width': 1797, 'height': 599}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/572bf286-5262-488d-99e8-b1f81a8260c3/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 120}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '5f5d5f53-584d-4871-b6f0-afe8c24ce37e', 'alias': 'financial', 'name': '金融', 'description': '歡迎大家討論有關金融時事,還有金融相關工作的地方,對於金融方面或者工作上的問題都可以在這個地方討論。', 'subscriptionCount': 69101, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:48:13.573Z', 'updatedAt': '2020-09-14T06:44:17.450Z', 'canPost': 
False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b7d0ba85-c26c-42dd-ad91-1096ebd3bd66/orig.jpeg', 'type': 'image/jpeg', 'width': 380, 'height': 127}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d74335d5-44ee-4940-b88e-1ae0d0c43ac0/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 367}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'fc7574ad-e1df-43d2-bb06-8825c718c065', 'alias': 'trending', 'name': '時事', 'description': '時事板歡迎針對國內外議題、國家政策、即時新聞等討論,也可在此分享時事議題的社論。', 'subscriptionCount': 326227, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:48:23.573Z', 'updatedAt': '2021-05-22T07:01:38.199Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['新聞', '討論', '爆料', '社論'], 'topics': ['校正回歸'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b160637b-af6d-4105-9edf-49d58de3088b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4c8abb6f-513d-4d9c-ae20-a11f7642172a/full.jpeg', 'type': 'image/jpeg', 
'width': 200, 'height': 200}, 'postCount': {'last30Days': 4877}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6eeeafb2-9dac-4d81-ae4b-ffecf0ad4444', 'alias': 'job', 'name': '工作', 'description': '本板提供分享面試經驗、職場心得、打工或實習經驗等相關工作話題。(徵才的職務刊登前請務必詳細閱讀置頂文。)', 'subscriptionCount': 205870, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:48:33.573Z', 'updatedAt': '2021-04-20T08:36:37.261Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '徵才', '經驗分享', '職業介紹', '勞工權益'], 'topics': ['面試經驗', '2020聯合校徵', '面試心得', '面試小技巧', '履歷教學', '航空業', 'Askmeanything', '應屆畢業生', '員工餐', '工作經驗', '空服員', '面試', '慣老闆', '奧客', '勞工權益', '勞基法', '薪資', '履歷', '打工職缺', '實習職缺', '試用期', '離職', '徵才', '實習'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2c5eab50-c8b4-44c0-bf5e-1fb057a38199/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/78eda0aa-6451-4d23-961a-5071325485a8/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 4582}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b22d2f5d-3c06-4d08-96b8-770eb38e8f96', 'alias': 'nursing', 'name': '護理', 'description': '請選擇標題分類發文。並不要再新增多餘的#等記號,直接打標題及文章內容。\\n\\n提供給職場的各位一個詢問環境的地方,也提供給護生們提前準備爆肝生活的動力!\\n希望大家都能好好討論,發現不適合的文或回應也好好檢舉,讓護理版能夠維持良好風氣', 'subscriptionCount': 54965, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:48:43.573Z', 'updatedAt': '2021-04-20T08:36:39.064Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 
'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前,請選擇⬆️標題分類,否則刪文', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['實習', '薪資', '醫院宿舍', '單位', '臨床', '面試', '國考'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/60e6b385-a32b-4b58-8fde-e37aa917226b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d2c901a3-18f6-425c-aa51-a7e985ad9524/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 570}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6ea32129-c485-4321-9030-f7f45ae4cc2f', 'alias': 'med', 'name': '醫事人員', 'description': '歡迎大家交流與醫事人員有關的內容還有做專業討論~不論是在學學生或是在實習、工作的大家,都歡迎在白袍板發文。\\n\\n過渡期背景圖圖源<a href=\"http://www.freepik.com\">Designed by macrovector_official / Freepik</a>', 'subscriptionCount': 33997, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:48:53.573Z', 'updatedAt': '2021-07-10T11:51:26.739Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '去看板規,我打的很幸苦🥺', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['工作', '考試', '健保', 'Askmeanything', '活動', '武漢肺炎', 'COVID19', '疫情', '時事', '醫檢師', '醫事人員板', '醫學系', '醫院'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b1b91677-4a5c-484b-aa41-8addab4c3e94/full.jpeg', 'type': 'image/jpeg', 
'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/03ba4d24-9af3-4c38-bdd4-095d4354d905/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 80}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a6dc1b6b-0aca-4004-86c8-3af6cdbfad23', 'alias': 'softwareengineer', 'name': '軟體工程師', 'description': '歡迎大家討論軟體開發路上遇到的各種坑,踩到的各種雷,以及想要砍 PM 的各種衝動', 'subscriptionCount': 35495, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:49:03.573Z', 'updatedAt': '2021-04-20T08:36:44.277Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['隕石開發', '碼農', 'k8s', 'tensorflow', 'python'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a693d555-63cd-4719-b242-098d95b687ff/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8415acf2-d286-4a5d-b045-7cb4db98516b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 151}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'db2f1fc3-a1b0-4246-9d2d-bdfda8d83fa7', 'alias': 'f2e', 'name': '前端工程師', 'description': '歡迎來到前端工程師板,在這裡大家可以愉快的討論前端技術、聊聊前端發展、問問前端問題。\\n另外優質內容、留言會置頂哦!', 'subscriptionCount': 16446, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:49:13.573Z', 'updatedAt': '2021-06-06T15:30:08.755Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 
'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/aa728719-c207-4bcf-bc75-a35be97ed060/orig.jpeg', 'type': 'image/jpeg', 'width': 535, 'height': 178}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7e543d78-054a-4ddb-9e65-5852dcfb73b8/orig.jpeg', 'type': 'image/jpeg', 'width': 174, 'height': 174}, 'postCount': {'last30Days': 22}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ab9d18b6-f990-410a-8cf8-4dbb34f084fa', 'alias': 'maker', 'name': '創客', 'description': '歡迎在此分享作品,討論技術,尋求協助。', 'subscriptionCount': 13478, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:49:23.573Z', 'updatedAt': '2020-09-14T06:34:26.249Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9dfc38dd-2c2e-4302-b27e-d7e718872c87/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/31a9ed5f-fc67-4909-b830-8f694d94b0dc/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 16}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '35594cbb-4236-4be9-b7d7-bf97d22667ae', 'alias': 'science', 'name': 
'科學', 'description': '科學板提供給一個科學討論的平台,科學包含形式科學、自然科學與社會科學,歡迎大家分享出自己的科學報告、問題或經驗。', 'subscriptionCount': 16777, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:49:33.573Z', 'updatedAt': '2020-09-14T06:41:57.176Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/50d2a5d4-c0e5-4596-a81e-0c19d2d5b1a7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 599}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6dbcc77c-f36b-4b45-bba9-f238fa91bc29/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ee51d6cd-ad7e-4d73-b79a-3ce32ea7798b', 'alias': 'japan_life', 'name': '日本生活', 'description': '🇯🇵歡迎分享各種日本留學、工作、打工度假等等旅日生活資訊及心得。\\n也歡迎準備前往日本生活的人討論各種疑難雜症🇯🇵', 'subscriptionCount': 44063, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:49:43.573Z', 'updatedAt': '2020-09-14T06:35:05.028Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/d73d8240-b218-4280-9c8d-396d15527ec4/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7b598fec-d25e-41ea-9d7c-fc67706ee1a0/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 61}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '53ad45b4-dcef-479a-8c3e-a9e97527b2a4', 'alias': 'parttime', 'name': '打工職缺', 'description': 'Dcard 提供各公司企業於打工職缺板張貼相關工作資訊,請務必依照格式填寫發文表單,經小天使審核後會張貼在看板上。\\n*若要討論職場心得、面試經驗等可至「工作板」哦!*', 'subscriptionCount': 76817, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:49:53.573Z', 'updatedAt': '2020-05-20T10:27:33.077Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '如需刊登職缺請至「置頂文」尋找表格填寫!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/50b09154-db6d-4b4e-9fce-385b502e4bcf/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1fb8d856-3d2a-4d16-8aff-dc7e08a4bb14/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 76}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '45a2cade-341a-4d5d-a1d8-b1cf82f77277', 'alias': 'intern', 'name': '實習職缺', 'description': 'Dcard 提供各公司企業於實習職缺板張貼相關工作資訊,請務必依照格式填寫發文表單,經小天使審核後會張貼在看板上。\\n*若要討論職場心得、面試經驗等可至「工作板」哦!*', 'subscriptionCount': 44690, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:50:03.573Z', 'updatedAt': '2020-05-20T10:27:07.469Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 
'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '如需刊登職缺請至「置頂文」尋找表格填寫!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e20a6c75-7dc6-4dce-a170-d3fe57389007/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/22029efe-e5c9-4f7d-b2c4-9850f0406943/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 25}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6bec54a2-7dfd-46b5-8752-7781b3f91efe', 'alias': 'studyabroad', 'name': '留學', 'description': '本板提供各種留學申請資訊、留學經驗、國外求學生活分享。', 'subscriptionCount': 79698, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:50:13.573Z', 'updatedAt': '2021-04-20T08:36:38.635Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '申請', '生活', '考試', '打工度假'], 'topics': ['留學生活', '簽證', '語言學校', '獎學金', '交換學生', '遊學', 'GPA', '留學申請', '留學考試', '德國留學', '音樂學院', '美國留學', '社區大學', '美國大學', '韓國留學', '日本留學', '英國留學', '英國大學', '韓國大學', '綠卡', '留學生'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2724eb86-52da-48a2-9a61-2645fa7e381e/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/e7b01d04-4b7f-4821-a206-e4791b38d4bc/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 623}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '02d48166-87df-474a-beb3-cc376bfd560e', 'alias': 'korea_study', 'name': '韓國留學', 'description': '歡迎大家來板上分享在韓留學、生活、打工、旅遊 等資訊及心得👐🏻', 'subscriptionCount': 24175, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:50:23.573Z', 'updatedAt': '2021-04-20T08:36:51.039Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '標題前面請分類:#心得、#問題、#資訊', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['韓國留學', '韓國', '交換學生', '留學生', '留學生活', '打工', '語學堂'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c4238964-4918-4df9-afbc-b579ce0dd489/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b3bf8041-7152-48a5-b3a6-b69d4edf6ffb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 71}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2bb20844-07ec-41d3-830f-ae551fc31246', 'alias': 'course', 'name': '課程', 'description': '本板提供討論、分享各類課程評價與心得 。', 'subscriptionCount': 25077, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:50:33.573Z', 'updatedAt': '2021-04-20T08:36:49.312Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 
'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['研究所', '補習班', '微積分', '程式語言', '通識', '選修', '必修'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/aee12037-f4ac-43e5-be6c-4373a195c4a0/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0e7914f1-11ed-4624-a5e3-01683b6514e9/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 285}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '58dba906-09c2-4808-9206-11c008c9ef16', 'alias': 'graduate_school', 'name': '研究所', 'description': '歡迎交流研究生學習歷程、研究所入學考試、推甄題目、解答、正備取錄取分數、考取研究所心得及系所介紹討論等等...。\\n\\n請勿發表易起爭論或貶低校系所之言論。', 'subscriptionCount': 64865, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:50:43.573Z', 'updatedAt': '2021-05-05T12:12:26.758Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '研究所板規', 'postTitlePlaceholder': '注意!心得文若提及補習班或補教老師名稱、姓氏、綽號、暱稱、大O、X碩等名稱形式會被刪除文章,堅持以該形式發文請至考試版或課程版等官方版面。', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c0fde9d4-d29d-44d9-9ae5-53b2bdc039b1/orig.jpeg', 'type': 'image/jpeg', 'width': 828, 'height': 276}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/baf9f299-234e-4273-bff4-76dffcbc573a/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 865}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'd43f33f9-873f-46b3-9f24-38c90056b96c', 'alias': 'exam', 'name': '考試', 'description': '本板提供討論、分享各種考試心得,也歡迎提供你的考試經驗幫助更多人應考準備哦!', 'subscriptionCount': 124442, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:50:53.573Z', 'updatedAt': '2021-04-20T08:36:36.306Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['轉學考', '國考', '重考', '考古題'], 'topics': ['登記分發', '指考', '重考生', '轉學考', '國考', '考古題', '參考書', '普考', '特考', '研究所', '夜校', '夜間部', '推甄', '學測', '統測', '指考', '成績單', '正取', '繁星', '申請', '補習班', '獨招'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/09b44f55-3bbd-46af-9655-8d5466bbf196/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1277f351-cda6-4197-a238-bfce47f34dc2/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2244}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '638dede9-28ab-4288-9552-8345e4dbf9a8', 'alias': 'accounting', 'name': '會計', 'description': '歡迎交流有關會計的任何事物', 'subscriptionCount': 19371, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:51:03.573Z', 'updatedAt': '2021-02-15T08:31:07.192Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '更新Dcard至最新版,即享有發文提示選單卡!', 'ipCountryCondition': {'exclude': [], 
'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5bacc3c9-e202-453f-9d89-ce3ff5f16639/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ec8a9ecb-d9d3-45a1-bd32-c863d47ecdb6/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 189}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1d909998-c33f-4556-8685-63e507cdc853', 'alias': 'gov_employee', 'name': '公職', 'description': '', 'subscriptionCount': 55271, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:51:13.573Z', 'updatedAt': '2020-12-12T18:14:05.702Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '☝🏻標題要分類(上面有選單)☝🏻', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8ebf0767-35ce-494a-86cd-b71f30b7ab48/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b264f084-8658-4b0e-969a-e9916d2877a9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 392}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'ecc6a486-c4ac-4f76-996d-7dda5dcb8518', 'alias': 'language', 'name': '語言', 'description': '語言板提供討論、詢問、交換、分享各國語言的學習經驗或語言學研究。', 'subscriptionCount': 100965, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:51:23.573Z', 'updatedAt': 
'2021-04-20T08:36:44.477Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': True, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['精選', '英語', '日語', '韓語', '歐語'], 'topics': ['日檢', '英語', '日語', '韓語', '翻譯', '歐語', '越南語', '動詞', '多益', '雅思', '托福', '日檢', 'TOPIK', '寫作', '字幕', '俄文', '漢字', '字典', '語言學', '第二外語', '德文', '檢定', '自學', '教材'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b9e300c1-7d8c-402e-9c81-c7cee6870f38/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/92c6f35e-0158-461a-9e61-0d587cd6d486/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 627}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e0a290f4-752b-48a2-a791-f211f8b46588', 'alias': 'book', 'name': '書籍', 'description': '書籍板提供好書推薦、書評、書展資訊或尋找各種國內外書籍。', 'subscriptionCount': 64603, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:51:33.573Z', 'updatedAt': '2021-04-20T08:36:48.512Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': ['精選', '教科書', '小說'], 'topics': ['言情小說', '書店', '二手書', '恐怖小說', '小說', '科幻小說', '散文', '書單', '推理小說', '作家', '東野圭吾', '奇幻小說', '穿越小說', '金庸', '原創小說', '輕小說', '繪本', '原文書', '武俠小說'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 
'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/55bd5211-cf04-48bd-9629-03c24b3142d7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a7bdc4c0-574e-4a91-a2a6-26c8897d3337/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 141}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a36c1acc-2417-4c75-924f-4079bb5b8e79', 'alias': 'novel', 'name': '小說', 'description': '歡迎來到小說版,這裡可以討論一切與小說相關之話題。', 'subscriptionCount': 68752, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:51:43.573Z', 'updatedAt': '2021-06-29T10:20:44.453Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '文章未分類會被禁言一天哦!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['言情小說', '尋書', '愛情', '原創小說', '原創', '言情', '創作', 'BL小說', '找書'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4cf0129c-51b3-4d35-8579-6c16aa2c61d2/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 599}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2e8583fb-6fdc-4f08-a3d9-f2a637b0c993/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 447}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f8acd3e6-17e9-4d9d-bc7c-bf24d94c2ad1', 'alias': 'literature', 'name': '詩文', 'description': '詩文板提供分享討論新詩、散文、小說等文章,也歡迎各種詩詞文章創作在此發表。', 'subscriptionCount': 26769, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:51:53.573Z', 'updatedAt': '2021-04-20T08:36:48.196Z', 'canPost': False, 
'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文記得加入「話題」分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['新詩', '散文', '小說', '詞創'], 'topics': ['新詩', '散文', '小說', '詞創'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b5dcb264-1714-431c-8ac1-609cd276096d/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/693c0654-f988-479e-9750-ff5e52bf99ab/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 323}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a91cd696-aaa9-4e11-a216-e7d37f898ec7', 'alias': 'taichung', 'name': '台中', 'description': '歡迎分享屬於你(妳)的台中故事,生活、美食、工作等等,都是你可以分享的內容。', 'subscriptionCount': 55141, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:52:03.573Z', 'updatedAt': '2021-07-12T04:47:42.953Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a4c95571-905e-41e3-8dad-b7b9514ffd5c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d29a7540-9895-45c4-b77a-9e330b9aeee6/full.jpeg', 'type': 'image/jpeg', 
'width': 200, 'height': 200}, 'postCount': {'last30Days': 205}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'f70adbe9-4a58-4dd6-b6a0-7e5e06e2afce', 'alias': 'kaohsiung', 'name': '高雄', 'description': '*****發文前請先詳細閱讀版規*****\\n\\n只要你是關心高雄、熱愛高雄的朋友們,都歡迎分享屬於你自己的高雄故事,無論是推薦的地方美食、分享資訊、協尋都可以!', 'subscriptionCount': 29599, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:52:13.573Z', 'updatedAt': '2021-07-12T04:47:36.701Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '標題記得打上文章分類,還麻煩看一下版規避免文章被刪除呦', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['高雄美食', '高雄旅遊'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/029c483b-627d-4c99-995e-7e84c36b9c7c/orig.jpeg', 'type': 'image/jpeg', 'width': 800, 'height': 267}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/626f7395-807f-4d3c-988c-8da57829e347/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 60}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'e33fd5d2-025f-46e4-8b81-3f6fecdd25e1', 'alias': 'whysoserious', 'name': '廢文', 'description': '人生苦短,why so serious', 'subscriptionCount': 33953, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:52:23.573Z', 'updatedAt': '2021-07-12T05:12:16.532Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': 
[], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 2, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/01e5491d-63ea-4253-a0a4-0dfad39d93c6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/eb5e4c45-13f9-4ad1-b81b-695e7fd6acfc/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 41246}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3a7bac6f-f2cb-471d-b4fd-17bd26c9249a', 'alias': 'disaster', 'name': '災害回報', 'description': '本板提供即時災害回報,或各地物資需求分享的專區。', 'subscriptionCount': 9475, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:52:33.573Z', 'updatedAt': '2020-08-13T06:00:04.109Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/dbcb6b7d-6c36-4795-92c8-d7af210cd414/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d02a84e8-1f43-49bc-8638-9252f792f3ba/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 6}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0c7337db-20ae-43d8-a310-9b22d6ba5f9d', 'alias': 'dcard', 'name': '建議回饋', 'description': '建議回饋板主要提供 Dcard 產品使用上的問題或功能建議討論。\\n若對文章有疑慮請善用檢舉功能或私訊 Dcard 客服。', 'subscriptionCount': 13936, 'subscribed': False, 'read': False, 'createdAt': 
'2020-02-04T07:52:43.573Z', 'updatedAt': '2021-04-20T08:36:56.329Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['小天使公告', 'Dcard新功能', '開板連署文'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/46921746-51a7-42af-961e-f4ee3b412866/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7ce06c3e-c268-40de-a6af-9fde37bf577c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 55}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1ce3ebca-8701-42d5-b14c-076fc629bc8e', 'alias': 'sex', 'name': '西斯', 'description': '西斯板(Sex)提供男女私密話題分享或性教育等情慾議題討論,若有性方面相關問題也可在此發問。(發文前請詳閱板規。)', 'subscriptionCount': 609171, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:52:53.573Z', 'updatedAt': '2021-06-24T04:50:12.522Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '發文請記得在下一頁加入話題或其他相關分類喲!', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': ['創作', '知識', '圖文'], 'topics': ['A片', '甲', 'Les', '無碼片', 'NTR', '內射', '自慰', '3P', '外流', '意淫自拍OL黑絲', '玩具大賞', '情趣用品', '大雞雞', '保險套', '約炮', 'H漫', '調教', '潤滑液', '做愛'], 'nsfw': True, 'mediaThreshold': {'RACY': 1, 'ADULT': 1, 'VIOLENCE': 1}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/50a4b6e7-fe05-493c-af56-ac63a1a73259/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/24f4f7ef-c308-4c37-80dc-349fd47cf71a/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 5659}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '546b4c64-c37f-4907-adcb-cf37bda4ce8f', 'alias': 'sex_literature', 'name': '西斯文學', 'description': '透過文字,感受性與愛的美好。\\n\\n*希望這裡可以成為喜歡寫文章及看文章的人們自由的天地(*¯︶¯*)', 'subscriptionCount': 121095, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:53:03.573Z', 'updatedAt': '2021-04-20T08:36:42.589Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['成人', '色情文學', '情色文學', '西斯文學'], 'nsfw': True, 'mediaThreshold': {'RACY': 1, 'ADULT': 1, 'VIOLENCE': 1}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2e74c4f8-9f18-4174-ae65-020c131b180f/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c35188c6-11f8-4ba4-b814-4055f87f8201/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 184}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e49886b8-f081-42a2-8f7b-a5195bb1ac2b', 'alias': 'gaysex', 'name': '男同志西斯', 'description': '👨\\u200d❤️\\u200d💋\\u200d👨 屬於男同志的小天地。\\n因蓋樓文章過於空泛,請避免於本板蓋樓唷!\\n圖片或影片縮圖記得不能出現生殖器呀\\U0001f9cf🏻\\u200d♂️\\n請用【【【實心色塊】】】遮蔽或縮短網址處理👨🏻\\u200d✈️', 'subscriptionCount': 79059, 'subscribed': False, 'read': False, 'createdAt': 
'2020-02-04T07:53:13.573Z', 'updatedAt': '2021-04-20T08:36:35.513Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '👆🏻 選完標題分類後,在此直接輸入標題即可,不必再輸入 # 囉 !', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['找片', '野裸', '控射', 'BDSM', '熊', '多人', '鞋襪', '情趣', '鮮肉', '大叔', '腿控', '喜好', '正太', '性生活'], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a75cf7f1-5327-4ba0-bfe6-9707f87d825e/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c89df378-b546-4481-b9d8-9e5cff2c4a81/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 514}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3443ed73-6de8-4fea-87e8-176bbd8a2506', 'alias': 'ero_manga', 'name': '色情漫畫', 'description': '請在第一個 #後面說明發文的意圖 (ex: #請益, #分享, #討論\\n第二個 #後面加上題材類型 (ex: #獵奇, #ntr, #觸手, #鬼畜\\n但是我發現沒有人理我\\nQwQ', 'subscriptionCount': 103594, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:53:23.573Z', 'updatedAt': '2021-07-15T12:01:01.087Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['純愛', 'NTR'], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/8daa93d4-4191-4e31-a512-34c50d335150/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/68c1e801-427c-4892-b798-51050bb19913/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 108}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f9355cbd-cc87-44e6-9ab9-d261522c63c4', 'alias': 'bdsm', 'name': 'BDSM', 'description': '歡迎大家交流各種 BDSM 資訊與個人心得。', 'subscriptionCount': 70668, 'subscribed': False, 'read': False, 'createdAt': '2020-02-04T07:53:33.573Z', 'updatedAt': '2021-04-20T08:36:41.423Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '標題請加上 分類#', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['置頂公告'], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/43d40d71-6fb2-476e-8d95-df460a52f1ef/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2b1cc3ac-a5a8-4f0a-8984-988a333584f2/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 156}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '05961595-fe4a-4843-9b54-4ec22951aa3f', 'alias': 'life_in_europe', 'name': '歐洲生活', 'description': '各種在歐洲生活大小事的分享,例如職場、旅遊、文化、日常生活等等\\n歡迎在歐洲、或是對歐洲有興趣的朋友,都能透過 #歐洲生活板 有更多交流哦!', 'subscriptionCount': 20000, 'subscribed': False, 'read': False, 'createdAt': '2020-02-24T06:40:28.102Z', 'updatedAt': '2021-04-20T08:36:50.222Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 
'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['歐洲', '北歐', '德國', '荷蘭', '法國', '歐洲生活', '歐洲旅遊', '留學', '求職', '海外工作'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/faf0c3ef-6ee7-4d02-8be1-17fde5726018/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8a6bdd28-91a2-43f6-a6f8-91d7cffba77f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 18}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '771433b1-f9d6-4a70-a9f8-1a8540bfe1b0', 'alias': 'test_hk', 'name': '測試香港看板', 'description': '', 'subscriptionCount': 109, 'subscribed': False, 'read': False, 'createdAt': '2020-02-25T10:01:27.581Z', 'updatedAt': '2020-02-26T09:11:36.709Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': ['HK']}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9320ddc4-9f23-40ad-970a-f43906bb947b', 'alias': 'hksponsored', 'name': 'HK 贊助活動', 'description': 'Dcard HK 官方提供各項優惠資訊的看板', 'subscriptionCount': 110, 'subscribed': False, 'read': False, 'createdAt': '2020-03-05T04:28:23.785Z', 'updatedAt': '2020-09-16T07:17:36.151Z', 'canPost': False, 
'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': ['HK']}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '29cf0f9b-f04e-44bb-980d-67baedeb4b56', 'alias': 'dcardaddemo', 'name': '廣告 Demo', 'description': '', 'subscriptionCount': 2, 'subscribed': False, 'read': False, 'createdAt': '2020-03-09T04:40:44.327Z', 'updatedAt': '2021-07-21T08:50:28.127Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '832896a0-21e3-4bb0-b515-5e1ed7238ed4', 'alias': 'beauty', 'name': '表特', 'description': '欣賞美的人事物是人的天性,歡迎於此板分享美女帥哥的情報。進板請務必先閱讀板規。', 'subscriptionCount': 95618, 'subscribed': False, 'read': False, 'createdAt': '2020-03-10T03:44:54.104Z', 'updatedAt': '2020-05-20T10:26:14.361Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 
'include': []}, 'subcategories': [], 'topics': [], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3f193554-bffa-4a05-aef4-187f350f9e66/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b54fe120-9cf1-4b57-b588-4f94def8e3da/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 179}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '521f686c-a54c-4217-9577-9a4e13f77f49', 'alias': 'latenightsnack', 'name': '宵夜文', 'description': '單純就想害別人餓,沒有別的😈', 'subscriptionCount': 16348, 'subscribed': False, 'read': False, 'createdAt': '2020-03-24T03:23:09.812Z', 'updatedAt': '2020-04-09T04:35:00.813Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['image', 'video'], 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1d914fb0-5579-498f-b9e6-3137bce94997/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 6}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '1dcd2a25-2ed8-4353-b594-711d14cf6121', 'alias': 'toofunny', 'name': '不能只有我看到', 'description': '不好笑不要發在這裡', 'subscriptionCount': 1361, 'subscribed': False, 'read': False, 'createdAt': '2020-03-24T03:23:42.682Z', 'updatedAt': '2020-04-23T04:00:43.066Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 
'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['image', 'video'], 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f6a0925b-a175-413e-beb3-9e8c5ef575ee/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '932897c6-3470-4053-993a-86aa390b9c6b', 'alias': 'showdogs', 'name': '曬狗', 'description': '你家的狗曬起來!', 'subscriptionCount': 35710, 'subscribed': False, 'read': False, 'createdAt': '2020-03-24T03:24:26.856Z', 'updatedAt': '2020-05-20T10:27:42.648Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a98c56b9-c92b-45de-9fa7-c98de734fede/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e9197f8e-f922-4518-8e2b-5a66b11533de/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 98}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'cbf56d6a-2fa9-4c63-b646-47b27abf1c02', 'alias': 'animal_crossing', 'name': '動物森友會', 'description': '動物森友會板提供板友們討論攻略、情報分享、無人島生活心得跟詢問集合吧!動物森友會之相關問題!', 'subscriptionCount': 45033, 'subscribed': False, 'read': False, 'createdAt': 
'2020-03-31T03:24:35.412Z', 'updatedAt': '2021-07-20T06:35:20.621Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '可以討論攻略、情報分享、無人島生活心得跟詢問集合吧!動物森友會之相關問題!希望大家發文前注意板規以及站規,也希望大家可以注意小心用詞,要跟其他同學友好相處哦!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['動物森友會', '島民', 'switch', '任天堂', '動森', '動物之森'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f7386c40-64a5-43ba-b48c-72e1cfc3976e/orig.jpeg', 'type': 'image/jpeg', 'width': 1230, 'height': 410}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5bc9d956-0bbe-42bc-ab29-f01eec167f34/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3118}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '9d18b308-26f5-4239-960b-df60f81175c7', 'alias': 'hkjob', 'name': '港澳工作', 'description': '呢度係比香港澳門嘅同學仔討論同港澳有關既工作議題、分享打工資訊嘅討論區,發文留言前請先閱讀板規', 'subscriptionCount': 2651, 'subscribed': False, 'read': False, 'createdAt': '2020-03-31T05:57:36.148Z', 'updatedAt': '2021-07-12T04:51:44.687Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['社畜', '翻工', 'Intern', '搵工', '見工'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': ['HK', 'MO'], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': 
{'url': 'https://megapx-assets.dcard.tw/images/d7d746e8-a57d-4d07-93fa-ab9f9d917e27/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4a1b4be9-d712-44d8-82fe-ff8519f0afda/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 27}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '61f1f8d3-0d15-4f5c-81ce-f6b361c7242e', 'alias': 'catering', 'name': '餐飲業', 'description': '歡迎交流包含餐飲職缺、心得、發問,以及同業討論類型的文章。😝\\n\\n請多多在這餐飲業板發文\\n大家一起努力維持良好風氣💪🏻', 'subscriptionCount': 13368, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:47:48.890Z', 'updatedAt': '2021-04-20T08:36:54.955Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '👉🏻 標題 👈🏻 🙈', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['餐飲', '奧客', '心得', '面試', '討論'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/012b19b7-86d6-4e9d-82f3-5de12f5cfef0/orig.jpeg', 'type': 'image/jpeg', 'width': 1181, 'height': 393}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2e447be1-bca9-4ea8-99be-e52264b11d25/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 23}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '960b42f9-9dc3-483e-829f-466be52d2906', 'alias': 'tech_job', 'name': '科技業', 'description': '科技業版', 'subscriptionCount': 42149, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:48:11.419Z', 'updatedAt': '2021-04-05T12:06:28.632Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': 
False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5856e177-e78a-44d1-9a1c-f53be1a7db74/orig.jpeg', 'type': 'image/jpeg', 'width': 512, 'height': 171}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c4bb5181-2f3c-4c5e-8006-c8bea5a69e99/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 450}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9a9e6d52-2192-4c1c-a8f0-3104700c3309', 'alias': 'teacher', 'name': '教師', 'description': '「教育無他,唯愛與榜樣。」\\n\\n願我們能一起為台灣的教育帶來更美好的時光', 'subscriptionCount': 17970, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:49:42.835Z', 'updatedAt': '2021-04-20T08:36:54.624Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['教師', '教檢'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b74dd1aa-3ac3-4ada-8f7a-a462179c1dc1/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5850794e-ccb0-416e-bb3c-50bcf85a4105/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 256}, 'favorite': False, 
'enablePrivateMessage': False}, {'id': 'c73d4eb9-bf99-4cfd-a874-bf2d0c1b527c', 'alias': 'service', 'name': '服務業', 'description': '歡迎來到服務業版,不管是職場老手想分享自身經驗,還是新手想詢問的不用客氣,但回覆也注意禮貌和平相處遵照版規唷:)', 'subscriptionCount': 8218, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:50:17.769Z', 'updatedAt': '2021-04-26T11:55:44.080Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文記得選擇標題分類唷,有需要新增的標題也歡迎於頂置文章告訴我', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['靠北老闆', '靠北奧客'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/eb68ab89-e0cb-4408-bd63-f409d4576011/orig.jpeg', 'type': 'image/jpeg', 'width': 1640, 'height': 546}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6f49b2d2-a4aa-4087-954b-ce4bcf4b1ab5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a5837b3c-088c-45a0-b537-28f91ce83c63', 'alias': 'oversea_job', 'name': '海外工作', 'description': 'Hello, 到世界各地挑戰也是你的夢想嗎?你是已經過關斬將成功踏上世界舞台的前輩嗎?一起來分享這份寶貴的經驗吧!', 'subscriptionCount': 28566, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:51:07.457Z', 'updatedAt': '2021-04-20T08:36:42.168Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': 
'徵才張貼統一限制文章格式如下:\\n\\n【公司名稱】\\n\\n【工作職缺】\\n\\n【工作內容】\\n\\n【工作地點】請填工作詳細地址,或註明在家工作\\n\\n【工作時間】請註明上下班時間、有無午休時間、休假與加班,若是排班制,也請註明每個班別的時段\\n\\n【徵求條件】禁止違反就業服務法及性別工作平等法等台灣現行法律,嚴禁特定性別與年齡\\n\\n【加分條件】禁止違反就業服務法及性別工作平等法等台灣現行法律,嚴禁特定性別與年齡\\n\\n【薪資待遇】請註明薪資範圍,不得低於台灣現行基本工資\\n\\n【公司福利】\\n\\n【聯絡方式】請填寫聯絡人、聯絡資訊,聯絡方式不得只有通訊軟體或社群網站帳號\\n\\n【其他備註】請註明消息來源:本人設立之公司招募/本人所屬公司招募/純屬代po,個人並不屬於此公司', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['澳洲', '英國', '美國', '歐洲', '海外工作', '菲律賓'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7682d2fa-06a1-4582-9fc9-ce15e19b9bb4/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e856449a-bcc7-4ba2-8179-eeff3e7fdb8e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 44}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '61fc48f0-84f5-4117-8016-01b16f702791', 'alias': 'job_search', 'name': '求職', 'description': '歡迎於本板討論各種求職相關話題。\\n無論是求職經驗分享、甘苦談、求職評價都可以在這裡討論喔~', 'subscriptionCount': 39003, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:51:34.863Z', 'updatedAt': '2021-04-20T08:36:42.491Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '【公司名稱】\\n\\n【工作職缺】\\n\\n【工作內容】\\n\\n【工作地點】請填工作詳細地址,或註明在家工作\\n\\n【工作時間】請註明上下班時間、有無午休時間、休假與加班,若是排班制,也請註明每個班別的時段\\n\\n【徵求條件】禁止違反就業服務法及性別工作平等法等台灣現行法律,嚴禁特定性別與年齡\\n\\n【加分條件】禁止違反就業服務法及性別工作平等法等台灣現行法律,嚴禁特定性別與年齡\\n\\n【薪資待遇】請註明薪資範圍,不得低於台灣現行基本工資\\n\\n【公司福利】\\n\\n【聯絡方式】請填寫聯絡人、聯絡資訊,聯絡方式不得只有通訊軟體或社群網站帳號\\n\\n【其他備註】', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': 
['面試經驗', '2020聯合校徵'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d9732b64-419e-485a-bf2e-7d71457e7418/orig.jpeg', 'type': 'image/jpeg', 'width': 240, 'height': 80}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/caf64188-7b8d-409d-bded-c809226a192c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 232}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'fc663236-e2df-4e06-bee0-430188307dad', 'alias': 'police', 'name': '警察', 'description': '', 'subscriptionCount': 16677, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:51:58.957Z', 'updatedAt': '2021-03-11T22:39:31.077Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '要好好使用發文標題\\n不然出門會遇到天竺鼠車車', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fa9a4db5-657c-44a2-b551-a534127ca7a6/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 360}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/57169ea1-2768-4490-8941-d6a880902304/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 87}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '212db32e-b00b-4bce-8702-00958b2e6218', 'alias': 'insurance', 'name': '保險業', 'description': 
'由於保險業一直常被社會大眾所詬病,但以實質效益上而言這項金融商品卻是對許多人都有相當大的幫助,雖說可能會有人認為可能他這輩子都不會用到保險,那為何要多花錢呢?但...你真的確定你永遠都不會遭遇不測嗎?因此希望在板上的大家都可以以良性的言論去做任何有關於保單或是整體保險業的討論,讓比較不熟悉的人可以有良善的討論空間獲知保險相關資訊。\\n\\n身為是動物園大學風管系的學生,會希望大家可以討論更多有關於整體保險業的發展,可以從會計、財務又亦或是總體經濟的方面去做討論,以期可以讓大家都可以增進更多有關於金融市場相關資訊。', 'subscriptionCount': 9338, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:54:56.992Z', 'updatedAt': '2020-09-14T06:45:48.828Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b24192b5-7e19-4a26-a84e-c5e8ce8f5860/orig.jpeg', 'type': 'image/jpeg', 'width': 1200, 'height': 400}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/88ff85ba-32b5-49a9-8b6d-2461601aea15/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 92}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e8266f3a-34f3-45a9-a252-889d7f4a5ff5', 'alias': 'gov_owned', 'name': '國營', 'description': '🙆🏻\\u200d♀️幫我看一下版規,再行發文喔🙆🏻\\u200d♀️\\n💁🏻\\u200d♀️國營甘苦談💕理性討論 友善發言💁🏻\\u200d♀️\\n🙇🏻\\u200d♀️版主現職國營小員工,服務不周請見諒🙇🏻\\u200d♀️', 'subscriptionCount': 19435, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:55:41.006Z', 'updatedAt': '2021-04-20T08:36:41.147Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': 
'👆🏻先選好「標題分類」,即可直接下標題囉!👆🏻', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['國營考試'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b4155c5e-19e5-4c9f-bb1e-9e2b4c8221ed/orig.jpeg', 'type': 'image/jpeg', 'width': 1024, 'height': 341}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9da89df3-fd1e-49c3-aa57-9e432acc38a9/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 101}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4fb1cbbf-57e3-46d0-9242-eb99fc0beea8', 'alias': 'startup', 'name': '創業', 'description': '任何關於創業的話題都可以在這邊發表、提問!', 'subscriptionCount': 35900, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:56:27.713Z', 'updatedAt': '2021-07-12T04:47:20.621Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請於上方⬆️選擇標題分類', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['創業歷程', '創業', '創業分享', '投資'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/73787ba3-c47a-4ae7-b0f5-ef0bf6cd318a/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 599}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9be6709a-cf3c-4b26-bc2b-ebd46311b1c5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 107}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '8e09d062-401c-4d91-a700-d4d93cd134e1', 'alias': 'I_I_IX', 'name': '消防', 'description': '臨火無懼,一生懸命', 'subscriptionCount': 5611, 
'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:56:57.620Z', 'updatedAt': '2021-07-15T09:42:39.508Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文請記得選分類↑,不然板主晚上睡覺會抱著枕頭哭இдஇ', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/af9911f6-3ec9-41bf-a7b1-866a1f535ed6/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/75671d6c-2e1a-4a2a-860c-87ba7763b13d/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 20}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '834c3634-e023-4189-a2fd-3c2cc47133e4', 'alias': 'free_lancer', 'name': '自由工作者', 'description': '歡迎大家一起討論在接案或日常中遇到的狀況或問題,或是分享好用工具或最新消息。期待大家都能守好版規內容,讓我們共同創造維持良好的討論環境。', 'subscriptionCount': 15693, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:57:31.723Z', 'updatedAt': '2020-09-15T08:34:37.500Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2d197bbd-0179-4a01-8900-7dafb004a972/full.jpeg', 
'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b16d44c0-557b-4e00-b734-6407608ba50e/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b62a4ac6-3e5f-4046-b9ee-dfde760cc06d', 'alias': 'lawyer', 'name': '法律人', 'description': '本板強制標題分類,發文前請詳閱板規及置頂公告,有關文章分類之規定,目前新板規已經施行,若有違規一律刪文。\\n(尤其注意本板目前已禁止生活法律之諮詢文,詳見板規及置頂公告)\\n歡迎法律從業人員、相關系所學生與 Dcard 卡友在此交流,\\r\\n希望除了討論案例外,能成為法律人在 Dcard 站上的小天地 :)\\r\\n討論主題包含但不限於實務案件、國家考試、學術、職涯規劃。', 'subscriptionCount': 20513, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T05:58:32.228Z', 'updatedAt': '2021-06-12T15:03:06.136Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文請注意👿👿👿\\n請選擇發文標題分類,分類如下:(未分類可檢舉刪文)\\n\\n(1)#討論:交流對案例或與法律人相關之議題等看法。\\n\\n(2)#分享:法律書籍、上榜經驗、工作經驗等與法律人相關經驗之分享。\\n\\n(3)#請益:詢問各類與法律相關之考試不懂的問題、職業上的問題、教科書選擇的問題等與法律人增強其專業能力可能須瞭解之問題。\\n\\n(4)#其他:明顯上述各個分類都無法相容而與法律人有關之發文內容。\\n\\n請養成良好習慣,以免日後違反板規而被刪文!!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['民法', '刑法', '憲法', '行政法', '國考'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/65676ee9-8cab-45c5-9c59-0aba43721c66/orig.jpeg', 'type': 'image/jpeg', 'width': 840, 'height': 279}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6280dbbb-2875-49dc-9eb3-b5e48e034899/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 181}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '77956722-a5a4-4640-8ebd-93797ae15f44', 'alias': 'realtor', 'name': '房仲業', 
'description': '發文前記得看過板規唷(=゚ω゚)ノ', 'subscriptionCount': 8003, 'subscribed': False, 'read': False, 'createdAt': '2020-04-20T06:00:11.736Z', 'updatedAt': '2021-05-31T22:11:10.640Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '#買賣 #租賃 #新聞 #閒聊 #心得 #資訊 #求助 #請益', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5585261c-774e-41fe-87f0-07e02957ece2/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/525d623f-b62f-470f-a537-fad05175e0b9/orig.jpeg', 'type': 'image/jpeg', 'width': 98, 'height': 98}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd9f27e0c-6d9d-4ac9-ab76-c333acade9a6', 'alias': 'reptile', 'name': '兩棲爬寵', 'description': '我們不歡迎對我們有惡意的人來我們的板面!😈\\n這是動物爬蟲板‼️🈲昆蟲;不是網路(程式)爬蟲板,注意下。🙏\\n我們也歡迎兩棲、部分節肢動物(禁止昆蟲)分享喔!\\n記得選「標題分類」喔!', 'subscriptionCount': 8695, 'subscribed': False, 'read': False, 'createdAt': '2020-05-12T06:11:27.589Z', 'updatedAt': '2021-05-25T05:35:01.452Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '注意規範 禁止昆蟲(눈‸눈)', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['蛇', '守宮', '蜥蜴', '龜', '兩棲'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': 
['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/04fb4415-78e5-404c-8cfb-5c1eb48c4727/orig.jpeg', 'type': 'image/jpeg', 'width': 1280, 'height': 426}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6d8bf9b2-d8f9-49dd-a017-bf90102ce13e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 108}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '90eceb29-529f-4179-962f-b15d1a1f4740', 'alias': 'dashcam', 'name': '行車記錄器', 'description': '綠燈可以再等,人生不能重來', 'subscriptionCount': 7244, 'subscribed': False, 'read': False, 'createdAt': '2020-05-14T07:05:00.527Z', 'updatedAt': '2021-04-20T08:36:47.435Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '如果內容可能令人不適,請於標題加上 #可能不適 提醒', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['行車記錄器'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6864d348-089a-4b18-a042-715ccb7678e0/orig.jpeg', 'type': 'image/jpeg', 'width': 1200, 'height': 399}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/edf6e0d9-fdd8-42e7-aa9a-9ef5a7606036/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 31}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '19202aff-7a81-4da6-bc02-cc0ad07a13e7', 'alias': 'math', 'name': '數學', 'description': '數學版是一個讓大家自由自在討論數學的地方,讓數學版繁榮的成長吧!!', 'subscriptionCount': 10057, 'subscribed': False, 'read': False, 'createdAt': '2020-05-14T07:36:27.376Z', 'updatedAt': '2020-09-15T04:15:56.264Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 
'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/4a5b4c95-3549-4f11-9dc8-4593698459e5/orig.jpeg', 'type': 'image/jpeg', 'width': 800, 'height': 267}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/cb6d0c84-57ea-4bc4-a715-31ba95c39955/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 119}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3b3a9566-c6b1-43fd-9040-394b4bd2747d', 'alias': 'female_sex', 'name': '女孩西斯', 'description': '很高興有了女孩們討論私密心事的女孩西斯版,感謝當初發起的同學以及支持的同學們。\\n女性私密話題在Dcard一直是一個很尷尬的地帶,發在西斯版總是有被歪樓的可能性,而發在女孩版則是有部分的同學並不想看到關於西斯的內容,所以有了女孩西斯版女孩們就可以有一個適合的空間可以暢所欲言了。\\n希望女孩西斯版除了能夠分享關於女性西斯的內容外也能多多分享關於性知識、性教育等等的話題,作為一個正向教育的存在。\\n\\n*最後特別感謝真理大學的同學為我們提供Miss Sex米西斯這麼可愛的名子(๑ơ ₃ ơ)♥️', 'subscriptionCount': 103810, 'subscribed': False, 'read': False, 'createdAt': '2020-05-14T07:45:18.570Z', 'updatedAt': '2020-09-14T06:46:14.074Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e96c4a83-2a73-479d-a442-ba8be26cd5f3/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 
600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6d61a6a8-8ecb-4beb-b725-308cf4c94508/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 123}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '16d8d779-4aff-4455-93e0-4a6cf3413d19', 'alias': 'aov', 'name': '傳說對決', 'description': '歡迎來到傳說對決板!不管你是OG級的高手,又或是喜歡一般娛樂的休閒玩家,只要喜歡傳說對決的都給我點進來並且按追蹤,讓你感受到大家的熱情及溫暖ψ(`∇´)ψ\\n\\n維持板上風氣由你我做起,以上🙇\\u200d♂️', 'subscriptionCount': 43260, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:35:50.145Z', 'updatedAt': '2021-07-22T07:10:18.622Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['手遊', '排位', '遊戲', '傳說對決板', '隊友', '傳說', '日常', '屁孩', '造型', '輔助'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c0f8c6b0-e298-4769-ba7f-19c18220a35d/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0bd49421-e01f-4cc0-b724-003f57d18767/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2639}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '9acbdf1b-6406-4ddd-9924-8b0ef61e8851', 'alias': 'lovenproducer', 'name': '戀與製作人', 'description': '歡迎大家來到戀與製作人板,希望太太們能在這裡交流,互相交換資訊唷(⁎⁍̴̛ᴗ⁍̴̛⁎)', 'subscriptionCount': 5705, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:36:12.826Z', 'updatedAt': '2021-07-12T04:48:26.167Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 
'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前,請先選擇標題分類⬆️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['白起', '李澤言', '許墨', '周棋洛', '抽卡', '陸服'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7b70eaaf-704d-486c-a15f-7d2dbe4fa0ba/full.jpeg', 'type': 'image/jpeg', 'width': 1242, 'height': 414}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f7478759-25a1-453b-a81f-ff58e31ebb36/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 25}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '612dd883-8f57-45a1-ac8d-47c706f25e6b', 'alias': 'food_mover', 'name': '食物語', 'description': '歡迎來到食物語版~\\n希望在這邊可以解決你們的問題❤️', 'subscriptionCount': 3662, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:36:33.432Z', 'updatedAt': '2021-07-12T04:48:42.530Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '需要發文詢問組隊請標上 #求健檢\\n有任何新活動請標上 #活動文', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/97ee2ffb-7a8e-4af7-8380-d646550258b1/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/949a216a-5241-4eba-bde9-08abc7e24b4c/orig.jpeg', 'type': 'image/jpeg', 
'width': 200, 'height': 200}, 'postCount': {'last30Days': 6}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '9ee9fe06-9ffa-4169-8910-618e783f862f', 'alias': 'maple_story', 'name': '楓之谷', 'description': '每週最新活動資訊點置頂文章🔝或話題傳送門\\n發文注意板規⚠️標題分類沒選會被刪文!', 'subscriptionCount': 13000, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:36:52.263Z', 'updatedAt': '2021-07-12T04:48:28.774Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '👆上方標題分類沒選會刪文!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['黃金蘋果', '時尚隨機箱', '皇家美髮整形', '新楓之谷', '楓之谷', '楓之谷M', '楓谷', '楓之谷板', '新手'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5c263ce3-a520-4c77-a864-d734efdcd131/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bebab651-4fe2-4191-83f4-cf1313b4df33/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 466}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'e064be2b-13f8-468a-9e88-b357170a0e33', 'alias': 'identity_v', 'name': '第五人格', 'description': '歡迎大家在這個板上一起討論第五這個遊戲~', 'subscriptionCount': 7855, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:37:14.181Z', 'updatedAt': '2021-07-12T04:48:54.777Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 
'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['教學', '金光', '討論', '公告', '情報'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/29a685d7-f4c0-49de-b46e-f787a4eb7033/orig.jpeg', 'type': 'image/jpeg', 'width': 1029, 'height': 343}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/be582a3b-d843-4cd4-8903-3552f922b7b1/orig.jpeg', 'type': 'image/jpeg', 'width': 179, 'height': 179}, 'postCount': {'last30Days': 277}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'ea38435a-1533-4eb0-9c66-76451d31fc35', 'alias': 'switch', 'name': 'Nintendo Switch', 'description': '哈囉大家好~如果有任何跟Nintendo Switch相關的問題或是遊戲都可以在此版分享討論喔。\\n小提醒發文或留言之前請先參照版規以免被停權喔~', 'subscriptionCount': 28347, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:37:44.144Z', 'updatedAt': '2021-07-12T04:50:54.918Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['switch'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'video', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9d524427-6bea-424d-be03-d167758b5722/orig.jpeg', 'type': 'image/jpeg', 'width': 1230, 'height': 410}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b3ba574c-d9ef-427e-8748-ab439d4f8222/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 132}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '332fec2f-7d2c-40f2-b594-cef586c849c5', 'alias': 'playstation', 
'name': 'PlayStation', 'description': '歡迎來到PlayStation版,各位可以分享你們玩遊戲的心得,攻略,以及PlayStation的新聞。\\n請先詳閱版規,切勿違反呦', 'subscriptionCount': 9210, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:38:33.868Z', 'updatedAt': '2021-07-12T04:50:49.422Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請注意⚠️內文須超過十五個中文字元⚠️', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['心得', '攻略', 'PS5', 'PS4', '新聞', 'PlayStation'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5f795fff-a810-4cc5-a568-07cfa92964cb/orig.jpeg', 'type': 'image/jpeg', 'width': 1181, 'height': 393}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/35173eb6-2043-4e37-93ee-c84d85281720/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 37}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'b5bffbd7-da4f-4591-8323-e763b3accf4d', 'alias': 'pc_game', 'name': 'PC Game', 'description': '歡迎大家來找隊友一起打團,排位,分享攻略等等', 'subscriptionCount': 9085, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T05:39:41.737Z', 'updatedAt': '2021-07-12T04:50:59.451Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['Steam', 'EA', 'Origin'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 
'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b8286277-19cb-42e2-88ea-e88dd0c4a474/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/82738950-377d-4886-b2a9-7fc43405ea58/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 55}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'd6d6529d-4886-4564-aeb7-e27adc0ae3f0', 'alias': '7_deadly_sins', 'name': '七大罪', 'description': '歡迎七大罪的粉絲們在這裡交流哦!', 'subscriptionCount': 1969, 'subscribed': False, 'read': False, 'createdAt': '2020-05-19T06:31:20.260Z', 'updatedAt': '2021-07-12T04:48:49.932Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f9b3c16a-be1c-498c-9f20-d7f867c2d1aa/orig.jpeg', 'type': 'image/jpeg', 'width': 1500, 'height': 500}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/766241ed-5d43-40c2-9613-4b7a3085755b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '0acd2601-4462-4bcb-9802-3ca7aacab20c', 'alias': 'dd52', 'name': '菱格世代 DD52', 'description': '', 'subscriptionCount': 7199, 'subscribed': False, 'read': False, 'createdAt': '2020-05-26T12:48:20.450Z', 'updatedAt': '2020-05-28T03:51:30.187Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 
'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fd6555ba-3a16-4c82-a765-d37ef8bfb6df/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4890ca81-a3d1-44bc-92db-061c75d5b15b/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e6e73722-dade-42c8-971f-16253c1b7a7c', 'alias': 'podcast', 'name': 'Podcast', 'description': 'Podcast 板以討論 Podcast 為主,歡迎分享你喜歡的節目、主持人或者相關資訊,發文前記得先詳閱板規唷!', 'subscriptionCount': 17551, 'subscribed': False, 'read': False, 'createdAt': '2020-06-08T09:01:58.763Z', 'updatedAt': '2020-09-14T06:46:48.761Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7cb7ae19-afb3-4f21-b3ad-4ed1c9686ea1/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e8df17a5-f76f-40ee-89e8-74fc18e4a25d/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 38}, 
'favorite': False, 'enablePrivateMessage': False}, {'id': '99602190-dde4-4ba0-82da-068d65bbe2ff', 'alias': 'funny_video', 'name': '梗影', 'description': '梗圖=有梗的圖,梗影=有梗的影片', 'subscriptionCount': 25289, 'subscribed': False, 'read': False, 'createdAt': '2020-06-09T10:48:52.539Z', 'updatedAt': '2020-08-13T06:01:53.443Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/48f4f123-7cfa-42ed-95ff-417243d88b61/orig.jpeg', 'type': 'image/jpeg', 'width': 856, 'height': 285}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b3e0cc07-5358-4b31-a26e-ec5bb86b4cab/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 67}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e2602a00-3f9a-435e-98a2-457ff4da77fc', 'alias': 'line_fresh', 'name': 'LINE FRESH 校園競賽', 'description': '', 'subscriptionCount': 937, 'subscribed': False, 'read': False, 'createdAt': '2020-06-30T04:09:20.357Z', 'updatedAt': '2020-08-28T08:08:26.074Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/2eb818cc-4e4b-4b73-b8e7-a88b76886733/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/54b42ca7-32d3-498e-94b7-d163862c90cd/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a9e7e219-20dc-4c3c-9df0-92b25260ca9d', 'alias': 'nba_test', 'name': 'NBA測試板', 'description': '', 'subscriptionCount': 2, 'subscribed': False, 'read': False, 'createdAt': '2020-07-29T16:27:34.696Z', 'updatedAt': '2020-07-29T16:27:34.696Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '214c3abb-6cf8-44b2-86b3-c156a9c5bbf2', 'alias': 'nba', 'name': 'NBA', 'description': '季後賽熱烈進行中,快來一起討論吧!\\n在這裡不用擔心因為用了 “崩潰” 這個詞被水桶喔^^', 'subscriptionCount': 62013, 'subscribed': False, 'read': False, 'createdAt': '2020-07-30T06:24:14.191Z', 'updatedAt': '2021-07-06T13:11:44.169Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '建議加入分類喔!', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['NBA先知', 'NBA', 'NBA數據', '湖人BOX', '快艇BOX', '公鹿BOX', '塞爾提克BOX', '暴龍BOX', '熱火BOX', '雷霆BOX', '火箭BOX', '金塊BOX', '籃球', '湖人', '季後賽', 'LBJ', '爵士', '籃網', '太陽', '快艇'], 
'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e6578008-14c4-4255-a65f-2a6bde18e1a4/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7b61e121-ad9a-4057-a823-9cacd5b09558/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 535}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3ec80bb0-2f02-478f-9afd-711327b460c5', 'alias': 'google', 'name': 'Google/Android', 'description': '歡迎發文分享討論 Alphabet 旗下相關產品、服務、技術或活動,例如 Google、Android、Chromium、Nest、Wear OS、Tensorflow、Waymo 等等。\\n\\n請不要在這詢問非 Google 品牌的硬體問題,可改發至問答板或 3C 板。\\n\\n發問前請善用 Google 搜尋,並在文章附上查詢到的資訊,不要發伸手文。\\n\\n發文請加標題分類。', 'subscriptionCount': 11262, 'subscribed': False, 'read': False, 'createdAt': '2020-07-31T03:45:28.382Z', 'updatedAt': '2021-05-29T09:17:10.534Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['Android'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c53e4a07-31b6-4f9f-8095-09a4e0ab3cd8/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/339e6e08-5ae2-4ded-b361-176068f673fc/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 50}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f0c59f55-18bb-4190-a15a-a5413ca9fa9d', 'alias': 'japan_star', 'name': '日本明星', 
'description': '歡迎各位小夥伴來到日本明星板!🥳\\n在這裡,你可以盡情跟大家討論你喜歡的日本明星✨', 'subscriptionCount': 8979, 'subscribed': False, 'read': False, 'createdAt': '2020-07-31T03:45:51.603Z', 'updatedAt': '2021-04-20T08:36:56.940Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['演員', '模特兒', '歌手', '聲優', '偶像'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/61b69d0d-a1d4-4ca0-9462-f3943456e2a8/orig.jpeg', 'type': 'image/jpeg', 'width': 1005, 'height': 335}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/dac23ce2-5273-44e3-ab71-8418df5e6d59/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 39}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a98953c3-52e1-4f06-b788-8e15634dbbaa', 'alias': 'crystal', 'name': '水晶礦石', 'description': '歡迎喜愛與研究水晶與礦石的朋友們這裡為大家分享與討論水晶與礦石...等事物的專屬園地', 'subscriptionCount': 10069, 'subscribed': False, 'read': False, 'createdAt': '2020-07-31T03:46:29.035Z', 'updatedAt': '2020-09-14T06:46:54.600Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/e9456bec-544c-44cc-80e6-8de342e69592/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9209af64-76c3-4234-8141-1a031672d0c7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 227}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '758a54be-4d68-43a8-b1db-b870a62b6d3f', 'alias': 'kart_rider', 'name': '跑跑卡丁車', 'description': '各位不管是電腦板,或是手遊板都歡迎在此板討論\\n此板主要以情報、心得、閒聊、問題討論為主\\n分享情報請以官方公佈為主,也請各位理性討論', 'subscriptionCount': 8845, 'subscribed': False, 'read': False, 'createdAt': '2020-07-31T03:48:26.983Z', 'updatedAt': '2020-09-24T08:34:10.115Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/12f0548b-8f74-4ff3-8014-6542492b8dd2/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b7c3ce73-3e4c-42b9-a08e-0b905df5b59d/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 170}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '08b853ae-f652-4cb2-b5ea-21c6909db951', 'alias': 'esports', 'name': '電競實況', 'description': '在 Dcard 上找不到電競和實況相關的看板?Say no more fam ( •̀ ω •́ )✧ \\n儘管在這裡分享與電競或實況相關的大小事,讓我們一同於 Dcard 掀起這股熱潮吧ヾ(≧▽≦*)o 但開心分享前,還是要記得詳閱全板板規和電競實況板規以免受罰喔(~ ̄▽ ̄)~', 'subscriptionCount': 7517, 'subscribed': False, 'read': False, 'createdAt': '2020-07-31T03:48:39.592Z', 'updatedAt': 
'2021-07-12T04:51:02.912Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請先閱讀板規再行發文,以免遭受禁言或停權處分。', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['電競', '實況', 'Twitch', 'YoutubeGaming', 'FacebookGaming', '直播', '嗨賴'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fac5d449-9423-4fc5-86c6-899fad31bd17/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1e6b4273-ff6b-4e3c-bff8-6e9df2bff127/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 39}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '529cb30e-bc50-4e47-8d6f-337daa31a210', 'alias': 'newforum', 'name': '新看板資訊', 'description': '新看板資訊主要提供最新的開板消息、開板連署討論以及有趣的看板介紹,歡迎訂閱此板獲得最新看板的第一手消息!', 'subscriptionCount': 3428, 'subscribed': False, 'read': False, 'createdAt': '2020-08-13T09:05:26.128Z', 'updatedAt': '2021-04-20T08:36:58.493Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文務必要選擇標題分類,且內容要與標題分類相關!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['公告'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bc97a750-4484-4488-8dca-5725c4009614/full.jpeg', 'type': 
'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/91ff6cdc-67b6-4311-a094-d35d81f2eaf5/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 13}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4d78d281-81f7-49f1-994e-8457402564f5', 'alias': 'vote', 'name': '投票', 'description': '請將要投票的選項另外逐項發在留言內~', 'subscriptionCount': 4104, 'subscribed': False, 'read': False, 'createdAt': '2020-08-17T09:37:03.037Z', 'updatedAt': '2020-08-17T09:53:36.185Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e31e39d3-b58c-449a-91a4-6da2520b67ef/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/60834ff0-27f4-41cf-a9b3-98aed7911302/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 8}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'a4d1c423-f5b2-43fd-9a30-58fc505ed718', 'alias': 'nou', 'name': '國立空中大學', 'description': '', 'subscriptionCount': 312, 'subscribed': False, 'read': False, 'createdAt': '2020-08-31T08:06:44.070Z', 'updatedAt': '2020-08-31T08:06:44.070Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 
'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'postCount': {'last30Days': 6}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '628f3897-3548-482a-8271-d53aa82a0b53', 'alias': 'thai_series', 'name': '泰國影視', 'description': '歡迎交流包含泰國電影、戲劇、音樂、明星等泰國娛樂圈的文章。', 'subscriptionCount': 6849, 'subscribed': False, 'read': False, 'createdAt': '2020-09-03T04:02:31.491Z', 'updatedAt': '2021-04-20T08:36:53.851Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請記得看過版規及站規再發文哦!', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['泰劇', '泰星', '泰國電影', '音樂', '爆雷', '討論', '分享'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/17b668ff-46ed-42ba-9d0b-6912b5e131c8/orig.jpeg', 'type': 'image/jpeg', 'width': 735, 'height': 244}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a654a995-031e-4694-880c-6b99647c777f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 50}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '52c22d4d-58ba-4dbf-862e-bbc5928a2eba', 'alias': 'superjunior', 'name': 'Super Junior', 'description': '藍家與ELF💙\\n\\nSuper Junior從限定團到韓流帝王,地位不可動搖,哥哥們全員續約就是為了給ELF一個承諾,攜手相伴到永遠\\n\\n看著越來越多新ELF加入,希望大家也能好好愛哥哥們,哥哥們一路走來的艱辛和努力都有目共睹,在這希望大家能盡情暢談,不要吵架,謝謝。\\n\\nSuper Junior的官方歌迷名稱是E.L.F(韓語:엘프),意思是Ever Lasting Friends(永遠的朋友),他們也成為韓團史上唯一一隊在偶像問候語後回禮的粉絲團。\\n\\n2006年6月2日正式公開,由隊長利特命名,應援色是寶藍色💙\\n\\n2018年11月6日,Super Junior於出道13周年的特別影片中公開官方應援棒名稱為「슈퍼봉」(英文:Super Wand;中文:超級棒)。\\n\\n小分隊:K.R.Y 、D&E 、Super Junior T、Super Junior M、Super 
Junior Happy', 'subscriptionCount': 13317, 'subscribed': False, 'read': False, 'createdAt': '2020-09-07T02:36:52.969Z', 'updatedAt': '2021-07-22T07:10:21.463Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['利特', '金希澈', '藝聲', '神童', '銀赫', '李東海', '厲旭', '圭賢'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/57a16a2d-271c-47b4-b749-4e2f080aa216/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 360}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/22ac3882-2e06-4540-af0e-94ea3e22de92/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 80}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '3f0027cd-fe1c-44a3-bfea-de14a291da66', 'alias': 'blackpink', 'name': 'BLACKPINK', 'description': '🖤💖 𝐁𝐋ΛƆ𝐊𝐏𝐈И𝐊 𝐈𝐍 𝐘𝐎𝐔𝐑 𝐀𝐑𝐄𝐀 💖🖤\\n 歡迎大家來到BLACKPINK板!', 'subscriptionCount': 25677, 'subscribed': False, 'read': False, 'createdAt': '2020-09-07T05:54:34.987Z', 'updatedAt': '2021-07-04T19:07:42.929Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前請務必詳閱板規與板規說明,發文視同同意板規', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['BLACKPINK', 'Lisa', 'Rose', 'Jennie', 'Jisoo', '追星', '韓星', '韓團'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 
'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/69c529c3-4652-422f-b56b-8c19f381e9c3/orig.jpeg', 'type': 'image/jpeg', 'width': 1217, 'height': 406}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ca71c7bd-d0b8-4232-880b-d6a599fc7e52/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 74}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6f430a9c-8e0a-43fd-be03-db4706c957e5', 'alias': 'surfing', 'name': '衝浪', 'description': '感謝大海的緣份讓大家都聚集在這裡,希望大家都能更熱愛衝浪、更愛海洋、也更珍惜在這裡交流的機會。', 'subscriptionCount': 6571, 'subscribed': False, 'read': False, 'createdAt': '2020-09-11T08:27:21.002Z', 'updatedAt': '2021-07-12T04:47:54.911Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/012a8040-a060-4884-a340-4da9db50d107/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8a9f76f9-172e-4532-9257-92b281ad8440/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '8ddb3848-cd34-4042-9a5e-184560764005', 'alias': 'mancare', 'name': '男性保養', 'description': '歡迎大家來男性保養版,可以一起討論與保養及彩妝相關問題,也歡迎大家分享自己的保養心路歷程。希望男性保養的觀念也能漸漸融入大家的生活,打破只有女性才能保養的觀念!', 'subscriptionCount': 16578, 'subscribed': False, 'read': False, 'createdAt': '2020-09-17T07:22:40.169Z', 'updatedAt': '2020-10-05T12:32:36.768Z', 
'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '文章標題前建議加上分類,可方便板友查閱。\\n👉🏿#分享 #試用 #黑特 #學術表單 #情報 #問\\n👉🏿範例: #分享 改善油痘肌的心路歷程。', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/930550ba-4918-4e16-9bff-6ba8f319d3b1/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a2c61a78-16c2-45c9-8b29-8d5a6d55571e/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 52}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '007cd01a-9ec0-4a8d-9d83-87d1d9f9b09a', 'alias': 'werewolf', 'name': '狼人殺', 'description': '安安這邊是狼人殺板呦\\n歡迎大家可以跟板友分享有關狼人殺的各種大小事:)', 'subscriptionCount': 2955, 'subscribed': False, 'read': False, 'createdAt': '2020-09-17T07:23:57.156Z', 'updatedAt': '2021-04-20T08:36:58.378Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['狼人殺'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/66570a10-fc72-4579-93fe-0120fbb5fe0e/orig.jpeg', 'type': 'image/jpeg', 'width': 580, 'height': 194}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/c92c8c4f-cb44-4886-b887-00d99a4655b5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 9}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '31c5c0e7-af98-453c-8994-f2eee483b490', 'alias': 'resit', 'name': '重考', 'description': '歡迎大家交流分享關於重考的點滴,期許你離開重考板的時候,能有所成長\\n\\n1.發文留言要自負責任。\\n2.請注意自身狀況,有急切之需求請先尋求現實之親友協助。\\n3.以後想到再補', 'subscriptionCount': 7037, 'subscribed': False, 'read': False, 'createdAt': '2020-09-17T07:25:18.680Z', 'updatedAt': '2020-09-27T07:13:33.203Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7d9a0f86-a74c-4626-b4c1-05c17e21d71a/orig.jpeg', 'type': 'image/jpeg', 'width': 1472, 'height': 491}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/88bdce40-2488-44fb-b8fe-f5c5eafb24a6/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 236}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7d8e003b-a518-4f1e-9eb8-ae927feb5db7', 'alias': 'pubg', 'name': 'PUBG', 'description': '嗨嗨各位,歡迎來到PUBG的大家庭♥(´∀` )人,無論你是PUBG的電腦玩家或是手遊版玩家,還是為和平精英的gamer,都可以來到這裡討論有關PUBG的電競活動與比賽以及即將推出的遊戲活動,讓我們一同創造和平且愉悅的看板環境吧耶呼✧◝(⁰▿⁰)◜✧\\n\\n啊po文時要記得選標題分類呦!!😠', 'subscriptionCount': 5865, 'subscribed': False, 'read': False, 'createdAt': '2020-09-21T09:45:07.323Z', 'updatedAt': '2021-07-12T04:48:57.788Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 
'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'video', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c3a7809c-732c-4958-bd0e-3045bc81f5a9/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1c9c77be-8146-4943-9a1d-b1e884fdd81c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 60}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '83f7e9ec-7dcb-445a-9ab5-a4f73937fca0', 'alias': 'zelda', 'name': '薩爾達傳說', 'description': '歡迎大家來到薩爾達傳說板~\\n大家能在這裡盡情地討論有關薩爾達傳說的相關資訊,也能進行遊戲相關討論\\n也希望大家能創造一個友善的討論空間哦~', 'subscriptionCount': 2437, 'subscribed': False, 'read': False, 'createdAt': '2020-09-22T06:39:58.474Z', 'updatedAt': '2021-07-12T04:50:31.581Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'video', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/64d23e0f-8d46-4814-b801-972b23bad8e0/orig.jpeg', 'type': 'image/jpeg', 'width': 1258, 'height': 419}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0b2a54ba-6a35-4dc6-b38c-4bb91b02ce10/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 16}, 
'favorite': False, 'enablePrivateMessage': True}, {'id': 'cedc1f9a-5f01-4002-b063-76379e0a183e', 'alias': 'r6', 'name': '虹彩六號', 'description': '歡迎大家來到虹彩六號版~\\n如果覺得有甚麼需要改進的地方都歡迎告訴我!!\\n發文前請記得觀看一下板規並保持理性友善的溝通喔٩(๑•̀ω•́๑)۶\\n\\n最後\\n這裡是虹彩六號版不是彩虹版\\n看版名稱都看不懂我是勸你不要出來發文', 'subscriptionCount': 4079, 'subscribed': False, 'read': False, 'createdAt': '2020-09-22T06:46:28.842Z', 'updatedAt': '2021-07-12T04:49:00.897Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '可以在右上角選擇標題分類囉~', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['情報', '更新', '徵人', '分享'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link', 'video', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0bafcee6-d141-485b-a42e-5af11c60457b/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 360}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/10043489-868b-4eca-8c26-eae50fde8523/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 21}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '79c34b70-b8fc-42e7-8f49-8caf8b2ae2ce', 'alias': 'fgo', 'name': 'Fate/Grand Order', 'description': '你好,這裡是板主狐狸~\\n歡迎大家來到Fate/Grand Order板,這裡歡迎任何有關fate系列的討論!\\n\\n請注意以下的事情:\\n\\n這裡禁止張貼個人 Line、FB、IG 帳號,在貼文或是留言都是不行的!\\n\\n我們很樂意幫助任何有問題的朋友,但是在詢問之前請善用Google,巴哈上面有很多大大整理好的資料了,如果找不到再來發文大家都會很樂意幫助你的。\\n\\n最後,祝福大家脫非入歐,把公/婆順利寶五!', 'subscriptionCount': 2095, 'subscribed': False, 'read': False, 'createdAt': '2020-09-24T04:25:13.930Z', 'updatedAt': '2021-07-12T04:49:14.316Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 
'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'video', 'link', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fc3f7be4-f425-4fb8-b0bc-d5297ec956f6/orig.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7828a4da-185d-4b8b-b511-aed4a71a54cb/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 14}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'b3c0adaa-88e0-4342-8184-7320680c3c0e', 'alias': 'marketing', 'name': '行銷', 'description': '【我們行銷的不只是品牌,更是自己!】\\n歡迎交流包含行銷、廣告、公關、製圖、文案、品牌、問卷等類型文章', 'subscriptionCount': 6864, 'subscribed': False, 'read': False, 'createdAt': '2020-09-24T04:26:12.702Z', 'updatedAt': '2021-07-11T16:14:39.524Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/560ab4f1-9132-46fe-b1ad-dad62d4cddcc/orig.jpeg', 'type': 'image/jpeg', 'width': 987, 'height': 329}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/caa758f7-e330-41af-a8fe-e487cd4b1cc7/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 35}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'0961b13e-c2ff-4819-b0b5-b97cb4f82b28', 'alias': 'tearsofthemis', 'name': '未定事件簿', 'description': '這是一個讓你體驗甚麼是推理與戀愛的結合,歡迎大家來到未定事件簿版!讓我們一起沉浸在四位男主神奇的魅力裡吧!', 'subscriptionCount': 2048, 'subscribed': False, 'read': False, 'createdAt': '2020-09-25T04:08:01.708Z', 'updatedAt': '2021-07-20T14:38:13.373Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請記得加註分類(如#劇情、#攻略等板規有參考範例),有標題分類可參考選擇,一起讓版上有好版面吧~。', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/15f5ee11-cd37-485d-8446-850e389697d0/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/28a1c151-a5e7-46f1-a770-adcb33d154e4/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 12}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'ed8d137d-f683-45d7-8b8f-a5b99b2d1463', 'alias': 'delivery', 'name': '外送', 'description': '歡迎大家來到外送板,為了大家的權益著想\\n請不要違反板規和站規歐❤', 'subscriptionCount': 10968, 'subscribed': False, 'read': False, 'createdAt': '2020-09-25T04:12:58.055Z', 'updatedAt': '2021-07-02T18:23:44.215Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請加上 #心得 #問題 #分享 等....\\n可自行決定', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['Ubereats', 'foodpanda', 
'熊貓', '外送員', '疫情'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/862738fc-7d43-4dbc-a4d6-435c37eef190/orig.jpeg', 'type': 'image/jpeg', 'width': 1600, 'height': 533}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6214e758-0e01-4625-9748-0a1477c5596b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 253}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd5f84f54-2ac2-42a2-96b1-de3d8c409b8b', 'alias': 'psychology', 'name': '心理', 'description': '心理板是提供一個提供任何心理相關知識交流的地方,希望能讓對心理學有興趣的人、專業人員,以及相關科系的學生們進行討論。本板不具任何心理諮商及治療的功能,有迫切需求建議直接洽詢學校輔導中心或醫院。', 'subscriptionCount': 35086, 'subscribed': False, 'read': False, 'createdAt': '2020-09-28T03:46:14.321Z', 'updatedAt': '2020-10-15T06:53:51.560Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文分類可自訂\\n徵求受試及情報文請務必點選分類', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/69fca327-4028-4e9b-8f9b-661e894afd31/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9ecd290f-796f-4bb7-b639-9014339b2850/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 97}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '64b2af79-2bcf-4935-8237-ee088b4edc85', 'alias': 'redive', 'name': '超異域公主連結', 'description': '小小甜心團長在此,雖然我不是小小甜心服的,但如果有任何問題都可以發到版上來問 我會盡量回答你們的', 'subscriptionCount': 2399, 
'subscribed': False, 'read': False, 'createdAt': '2020-09-28T03:48:10.714Z', 'updatedAt': '2021-07-12T04:50:28.865Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/2923a8b8-481f-4419-a6dd-735ff4f55001/orig.jpeg', 'type': 'image/jpeg', 'width': 1280, 'height': 426}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/80ea6ff5-dc53-41ba-96b2-d7792c7b577d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 16}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '0c23472a-1a56-4735-a88b-68134197d4e3', 'alias': 'sky', 'name': 'Sky 光遇', 'description': '本版專為Sky 光遇所建立,促進彼此友好交流。只要簡單的牽手、擁抱,都能讓人感到幸福快樂,溫暖的你能照亮每位光孩版友,你也會是他人心中的小太陽,歡迎來到光遇版,成為大家庭的一份子。', 'subscriptionCount': 6190, 'subscribed': False, 'read': False, 'createdAt': '2020-09-28T03:50:13.090Z', 'updatedAt': '2021-07-12T04:49:25.114Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '更新:發文不用自己打分類囉!(請在上方選擇標題分類即可)', 'ipCountryCondition': {'exclude': [], 'include': []}, 'subcategories': [], 'topics': ['SKY光遇', '徵友', '光遇', '互火', '日常', '季票', '小王子', '新手', '互心互火', '景點'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/baec67fd-e347-40ac-9c1e-56e018aaa1b2/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/0f813b22-7f0f-4756-80cb-530f4ad27ba6/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 382}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'c5f6f3b2-8690-4eee-b60d-d17af2cc5959', 'alias': 'ask', 'name': '問答', 'description': '在問答板發文不用擔心錯板,我們歡迎大部分的問題!(小部分不歡迎請看板規)在問答板,我們關注問題本身,而非發問的人。這個板希望透過問答的方式,激發大家的思考與討論,引出一些輕鬆有趣的知識;同時,也希望問答板可以成為一個讓大家運用社群的力量,一起解決難題的好地方。板規補充請看:https://www.dcard.tw/f/ask/p/234644860', 'subscriptionCount': 17608, 'subscribed': False, 'read': False, 'createdAt': '2020-10-15T07:14:29.710Z', 'updatedAt': '2021-04-20T08:36:40.847Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '本板標題請一律使用問題的形式,即使你在文章內會自問自答', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['美食', '美甲', '重機', '烹飪', '星座'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/81c7e943-ab48-4bd9-a364-9c9740170512/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/16ee5a64-0bb8-44bd-834a-32c766ff5010/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1036}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2fb88b62-aa28-4b18-af51-dda08dd037a9', 'alias': 'stock', 'name': '股票', 'description': '本板為股票專門討論板,討論內容不侷限台灣股市,貼文必須有股市相關點,並符合板規規範,若貼文內容違反規定,板主將透過管理後臺刪除貼文,貼文刪除的同時,會依照板規規範自動禁止發文者繼續於本看板發言。\\n\\n發文前請先看下板規,才不會踩到喔', 'subscriptionCount': 209699, 'subscribed': False, 
'read': False, 'createdAt': '2020-11-19T09:17:31.052Z', 'updatedAt': '2020-12-22T18:06:09.500Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '⚠️發文請記得選分類⚠️\\n❗️選了分類就不用再重複輸入❗️\\n#標的 #新聞 #分享 #請益 #畢業 #其他', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bc0c4cf3-3b79-4697-9a22-75da3ec6c7c4/orig.jpeg', 'type': 'image/jpeg', 'width': 600, 'height': 200}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fb2eb050-73eb-4d13-a200-22fbab9a4b4d/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1584}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'b550ff87-4e78-4d67-98b1-6e8113374239', 'alias': 'kimetsu', 'name': '鬼滅之刃', 'description': '歡迎一起交流《鬼滅之刃》相關內容。\\n建議文章內文至少 10 字。', 'subscriptionCount': 15722, 'subscribed': False, 'read': False, 'createdAt': '2020-11-19T09:18:52.815Z', 'updatedAt': '2021-07-12T04:47:11.780Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '禁止內容農場式誇大不實的標題。', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f86d48c7-6e1e-46cd-ad94-ba07633222de/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 
'logo': {'url': 'https://megapx-assets.dcard.tw/images/4beb5c79-c3b2-4385-8ab3-69ef64dbe627/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 33}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '8329565a-d1da-4a46-a3d3-aff6f9f8ddbf', 'alias': 'twice', 'name': 'TWICE', 'description': 'ONE IN A MILLION!\\n歡迎進版~\\n發文前記得先看進版須知2.0喔!', 'subscriptionCount': 19037, 'subscribed': False, 'read': False, 'createdAt': '2020-11-19T09:19:14.419Z', 'updatedAt': '2021-07-12T04:51:29.597Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '👆發文按分類', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['TWICE', 'ONCE', '追星', '愛之味', '子瑜', '舞台', 'AlcoholFree', '專輯', '韓星'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e85b93bf-3c9a-4cd4-b37c-1eaa81aeeea3/orig.jpeg', 'type': 'image/jpeg', 'width': 1500, 'height': 500}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/97f7262a-c701-4451-bb37-a34bffeb7a92/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 148}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '8e21e2e3-1e7a-42e4-86bb-0b15d034b634', 'alias': 'hiking', 'name': '登山', 'description': '本板的宗旨以提供新手友善學習登山為目的,歡迎各路老手、大師、大大、大神分享自身經驗,不過請理解不同經驗的多元性和差異性,希望這裡是一個大家能理性正向且互相包容的討論平台。', 'subscriptionCount': 14068, 'subscribed': False, 'read': False, 'createdAt': '2020-11-19T09:20:14.273Z', 'updatedAt': '2021-07-16T02:32:21.881Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 
'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '☝🏻請善用標題分類☝🏻', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['登山', '新手', '爬山'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3c6b7a5f-87f2-41f2-a3d6-941d6244811e/orig.jpeg', 'type': 'image/jpeg', 'width': 1200, 'height': 400}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/b0f4287e-bd41-4c06-8e77-9fa77e022fa7/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 22}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'b9ef6eea-d192-455b-877b-6a912a92de49', 'alias': 'rom_gnjoy', 'name': 'RO:守護永恆的愛', 'description': '承諾忠於經典,秉持最高誠意守護你我的回憶,更以豐富的冒險系統打造全新的RO體驗,讓我們一起創造新回憶,再次守護永恆的愛!', 'subscriptionCount': 761, 'subscribed': False, 'read': False, 'createdAt': '2020-11-26T04:41:42.026Z', 'updatedAt': '2020-12-02T04:00:03.249Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['image', 'video', 'classic', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f920e1ff-94b1-46f9-8fc6-7619f15b17fc/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/bdb9394d-9802-4f80-8b0f-ccbf6a507944/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 
'61ab38c2-f754-4796-8f2d-6cc6f7e61c20', 'alias': 'brawl_stars', 'name': '荒野亂鬥', 'description': '歡迎大家來到荒野亂鬥版💕\\n在這裡不論是對於賽季更新或者是戰術等都可以提出~\\n想在這裡徵好友或戰隊隊員也是可以的喔~\\n-\\n另外 有任何建議也歡迎各位的指教\\n發文前也務必查看一下版規噢', 'subscriptionCount': 3187, 'subscribed': False, 'read': False, 'createdAt': '2020-11-26T07:54:09.203Z', 'updatedAt': '2021-07-20T04:52:04.970Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['荒野亂鬥'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d9175fd8-ba90-4699-bb38-b75ae86feae7/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/73287e84-39d5-4889-801a-9d24be77b0b4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 54}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'a6748b05-cd2b-4b0a-ad1b-c84bed9aab4a', 'alias': 'lolm', 'name': '英雄聯盟:激鬥峽谷', 'description': '本版為激鬥峽谷版,希望大家抱著友善的心意起玩遊戲呦(❁´◡`❁)', 'subscriptionCount': 16825, 'subscribed': False, 'read': False, 'createdAt': '2020-11-26T07:56:05.691Z', 'updatedAt': '2021-07-12T04:49:27.915Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': 
['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b2075cfd-6853-4827-a9b0-775dbfbe6ddf/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2ae766e2-416e-4c90-b07b-93833b1fac6c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 252}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '9f51ebd1-8aca-45d5-aafa-1fb46d4b002f', 'alias': 'arknights', 'name': '明日方舟', 'description': '歡迎各位博士們來到羅德島的大家庭♥️\\n大家一起來創造和平且愉快的看板空間吧!\\n博士,您還有許多事情需要處理,現在還不能休息哦!', 'subscriptionCount': 1152, 'subscribed': False, 'read': False, 'createdAt': '2020-11-26T07:58:30.113Z', 'updatedAt': '2021-07-12T04:49:54.839Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '1.標題要記得分類!\\n2.不能在標題大暴雷!', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f1895d1e-c9a9-4d17-a292-1da5a4118445/orig.jpeg', 'type': 'image/jpeg', 'width': 1200, 'height': 400}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/29ab7b58-2d41-41df-9c39-448234f68112/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 12}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'f126c7dc-0364-4569-8536-f16f0b795b31', 'alias': 'valorant', 'name': '特戰英豪', 'description': '這是讓大家自由討論特戰英豪的版版~也歡迎大家在這認識朋友哦😋', 'subscriptionCount': 2997, 'subscribed': False, 'read': False, 'createdAt': '2020-11-26T07:59:35.463Z', 'updatedAt': '2021-07-12T04:49:51.992Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8ae0536e-a51c-4b0a-9fb6-d82f559e0b64/orig.jpeg', 'type': 'image/jpeg', 'width': 1024, 'height': 341}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ac8c47be-df59-46f9-bf7e-f32293bb0163/orig.jpeg', 'type': 'image/jpeg', 'width': 194, 'height': 194}, 'postCount': {'last30Days': 68}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'eef93186-1ef2-48fa-ad6d-96bc5a75ea8b', 'alias': 'konosuba', 'name': '為美好的世界獻上祝福', 'description': '', 'subscriptionCount': 331, 'subscribed': False, 'read': False, 'createdAt': '2020-11-27T08:13:46.884Z', 'updatedAt': '2021-07-12T04:49:57.263Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b4369f03-1f80-4721-9a06-baa3384b622f/orig.jpeg', 'type': 'image/jpeg', 'width': 1048, 'height': 349}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9105aa87-074b-47ec-b0c7-629d6ac008b0/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2}, 'favorite': False, 
'enablePrivateMessage': True}, {'id': '917d5f52-14c4-4821-9e18-9ffd9e927d49', 'alias': 'among_us', 'name': 'Among Us', 'description': '大家好,歡迎來到among us版\\n請各位詳閱公開說明...欸不是,是詳閱板規\\n不管你是手遊還是電腦版的\\n都歡迎在此討論呦(。・ω・。)ノ♡', 'subscriptionCount': 2423, 'subscribed': False, 'read': False, 'createdAt': '2020-11-27T08:20:12.494Z', 'updatedAt': '2021-07-12T04:49:30.775Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '內容要超過15字٩(。・ω・。)\\ufeffو', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/ed913386-bb04-4ef2-98fd-0a0575764f75/orig.jpeg', 'type': 'image/jpeg', 'width': 512, 'height': 171}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9acfb1ee-02ab-4ead-a65c-b46690b0e01e/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 18}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'bd5f5b7c-5bb1-4a99-8e01-6c4e3451dd64', 'alias': 'vtuber', 'name': 'VTuber', 'description': '可以推廣各種VTUBER的地方喔', 'subscriptionCount': 29828, 'subscribed': False, 'read': False, 'createdAt': '2020-12-08T08:13:43.717Z', 'updatedAt': '2021-06-09T04:34:58.175Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 
'availableLayouts': ['classic', 'link', 'image', 'video'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/27f2acd2-a48d-4778-b84d-2b1c2306135b/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1bed222b-4fcf-4776-8030-e8b1e68dddea/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 765}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '96de4688-7bb0-4bc8-92fa-e625408fc962', 'alias': 'tainan', 'name': '台南', 'description': '台南鄉土情,美食吃不停。 你/妳要找的台南,全都在這裡了 : )除了基本的什麼該發什麼不該發請詳見板規之外,標題最前面別忘了加上分類呦:#住宿、#景點、#活動、#美食、#情報、#詢問、#討論、#酒吧。', 'subscriptionCount': 15723, 'subscribed': False, 'read': False, 'createdAt': '2020-12-10T08:43:00.947Z', 'updatedAt': '2021-03-11T08:54:18.672Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a1b1a4d0-2e4c-485c-b074-bc30baebeaaf/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/52fcb95f-5d54-460d-89a7-3516e59ac4db/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 55}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '42528f0b-02e5-4493-8891-cc1c01eed184', 'alias': 'hsinchu', 'name': '新竹', 'description': '哈囉大家 這裡是新竹版 \\n只要你對新竹有興趣或是有任何疑問任何想知道的都歡迎來這邊與大家一起討論\\n或是分享各種大大小小新舊事物給大家呦', 'subscriptionCount': 10491, 'subscribed': False, 'read': False, 'createdAt': '2020-12-10T08:43:36.269Z', 'updatedAt': 
'2021-07-22T17:43:18.657Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文前請詳閱版規', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'logo': {'url': 'https://megapx-assets.dcard.tw/images/f63b1cc1-a275-41c9-a511-bafe38e1599c/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 37}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '81d355d4-5f2e-45fb-8399-984c5296d73e', 'alias': 'nct', 'name': 'NCT', 'description': '歡迎加入NCT World💚發文前請先閱讀板規💚', 'subscriptionCount': 11334, 'subscribed': False, 'read': False, 'createdAt': '2020-12-18T10:00:36.645Z', 'updatedAt': '2021-07-16T03:38:31.271Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '記得選擇標題分類👆', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['NCT', 'NCT127', 'NCTDream', 'WayV', '追星', 'NCTU'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'video', 'image', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a68f20da-1d44-404a-8228-8a97eb3c7ae5/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/871d4982-cc3d-4dae-a4b4-63a3e68f18fa/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 214}, 'favorite': False, 'enablePrivateMessage': True}, 
{'id': '14ff5822-97b8-422e-a86b-ac374e666d65', 'alias': 'ninjamustdie', 'name': '忍者必須死', 'description': '忍者必須死看板提供卡友們忍界最新資訊、心得攻略,在這裡尋找忍界羈絆,踏上最強忍者之路吧!', 'subscriptionCount': 1122, 'subscribed': False, 'read': False, 'createdAt': '2021-01-04T05:46:05.004Z', 'updatedAt': '2021-07-12T04:51:19.682Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'video', 'link', 'image'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/05db629e-eda1-4480-8ea8-9d5acd72625c/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7bca7372-5b40-40c5-b3ed-edfd88fb1152/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 28}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '49864c40-dd4e-4a4c-a7e6-d3fe28ea8b0c', 'alias': 'puipui', 'name': '天竺鼠車車', 'description': '天竺鼠車車真的好可愛喔。', 'subscriptionCount': 9538, 'subscribed': False, 'read': False, 'createdAt': '2021-01-21T06:26:36.990Z', 'updatedAt': '2021-04-20T08:36:40.510Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['天竺鼠車車'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/b2e42b88-4b8f-4344-93e7-26882471357b/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/7b3188d9-1e46-48c9-8e81-012385762a5c/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 8}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c385435b-ddd2-410f-bb75-c4522905c4dd', 'alias': 'redvelvet', 'name': 'Red Velvet', 'description': 'Happiness! 💗💛💙💚💜', 'subscriptionCount': 6796, 'subscribed': False, 'read': False, 'createdAt': '2021-01-21T06:51:18.384Z', 'updatedAt': '2021-03-02T15:34:42.599Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6bd6ecc0-8f59-4e91-855d-35cc147beef0/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8fdbbb04-0ce0-451c-a6bd-80158938b724/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 18}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd43c42b8-1018-495b-9ca1-0430e660456b', 'alias': 'jp_univoftokyo', 'name': '東京大学', 'description': '東京大学掲示板へようこそ!!\\n東京大学掲示板では東京大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 105, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:14:12.269Z', 'updatedAt': '2021-04-09T11:33:33.374Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8da96cdf-a01f-4710-b1eb-e242ffbc36d5/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/872e6411-ff65-4b46-a354-62136c304b87/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '7e3e20d7-eeff-41e9-a89c-ab12fd6d96e0', 'alias': 'jp_agu', 'name': '青山学院大学', 'description': '青学掲示板へようこそ!!\\n青学掲示板では青山学院大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 102, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:30:34.293Z', 'updatedAt': '2021-04-09T11:34:13.925Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e9fef723-fa70-4ce6-99bd-a9d0a9368d9f/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/067c3121-c7b6-47fc-b6a0-34c9b06f18d2/full.jpeg', 'type': 'image/jpeg', 'width': 200, 
'height': 200}, 'postCount': {'last30Days': 9}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '06289c42-54bb-4600-b9f9-aa97f5c4250d', 'alias': 'jp_waseda', 'name': '早稲田大学', 'description': '早稲田大学掲示板へようこそ!!\\n早稲田大学掲示板では早稲田大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 188, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:32:38.538Z', 'updatedAt': '2021-04-09T11:31:04.806Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e0cfd3dc-69cb-4f5e-85c7-a21f825c06d7/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/fda43b68-5e5b-4d3f-9bc7-08236eff24cb/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 20}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '59eeb40a-4fcd-4dd6-aa57-b7ad1f28b948', 'alias': 'jp_keio', 'name': '慶応義塾大学', 'description': '慶應大学掲示板へようこそ!!\\n慶應大学掲示板では慶應義塾大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 100, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:35:34.922Z', 'updatedAt': '2021-04-09T11:32:46.924Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': False, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 
'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9aa33870-b03d-40f6-a1d8-f98b58edd904/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4a4cbcf1-e317-4d82-b9ba-ca000d038533/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 5}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '24e4c631-bfa1-4b33-b0ee-d22ab6218a7c', 'alias': 'jp_meiji', 'name': '明治大学', 'description': '明治大学掲示板へようこそ!!\\n明治大学掲示板では明治大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 139, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:40:42.548Z', 'updatedAt': '2021-04-09T11:32:05.344Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5088a2cc-8334-4ad4-957c-2ffedc8bf1ec/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/469fc6f0-7f14-48c1-a977-f49ea6c797a9/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 5}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '81ebee91-492d-4713-9ba9-552943f70de4', 'alias': 'jp_chuo', 'name': '中央大学', 
'description': '中央大学掲示板へようこそ!!\\n中央大学掲示板では中央大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 70, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:42:23.942Z', 'updatedAt': '2021-04-09T11:44:59.361Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/df1ffb6f-d96f-4b93-b4d2-5ee7c0150303/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/02a32bc2-ea09-4c0a-a33a-9eae0a4e1a54/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '75fe9a0f-4262-47e3-bbd0-baaa34152cf8', 'alias': 'jp_hitotsubashi', 'name': '一橋大学', 'description': '一橋大学掲示板へようこそ!!\\n一橋大学掲示板では一橋大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 50, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:44:16.800Z', 'updatedAt': '2021-04-09T11:44:09.340Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 
'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/fa3faa78-de2d-4b5a-af46-fe6efb681c55/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3533ad15-6b49-450b-9986-c4e68b5045c2/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0a368a58-5ba5-4780-a35c-636f1131932a', 'alias': 'jp_gakushuin', 'name': '学習院大学', 'description': '学習院大学掲示板へようこそ!!\\n学習院大学掲示板では学習院大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 58, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:46:07.776Z', 'updatedAt': '2021-04-09T11:43:35.677Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/e14be426-4aed-48f2-80e0-9dd12a57ce88/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/62654f71-a9e4-4f9d-8221-a2835296de11/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 3}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c3c48d5f-30f8-4064-9ded-8ce715fa0eee', 'alias': 'jp_hosei', 'name': '法政大学', 'description': '法政大学掲示板へようこそ!!\\n法政大学掲示板では法政大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 78, 
'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:48:04.122Z', 'updatedAt': '2021-04-09T11:42:54.571Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bfb9f7b6-c620-4815-aa80-e3b4996dba59/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/86f14a70-4d0f-4d00-8d69-3781c02926dc/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 5}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '4b88adcc-9f39-4d92-aa72-ce9861378101', 'alias': 'jp_sophia', 'name': '上智大学', 'description': '上智大学掲示板へようこそ!!\\n上智大学掲示板では上智大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 86, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:49:40.367Z', 'updatedAt': '2021-04-09T11:42:13.651Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8ac409c9-57f2-4fe4-b9e9-0503aa1e294a/full.jpeg', 'type': 'image/jpeg', 'width': 
1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/249c22db-f013-4ce5-942f-5010077c7757/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '707b6a63-8b10-440e-bfff-a4d4f01986ce', 'alias': 'jp_ochanomizu', 'name': 'お茶の水女子大学', 'description': 'お茶の水大学掲示板へようこそ!!\\nお茶の水大学掲示板ではお茶の水大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 43, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T00:53:43.579Z', 'updatedAt': '2021-04-09T11:41:29.760Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/0beceaf4-ad53-4513-aecb-0ce10985ceaa/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/023b0e57-ed18-45a8-8181-142216a655cc/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 2}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '932c4823-42ff-4c07-80ef-681174279e1d', 'alias': 'jp_rikkyo', 'name': '立教大学', 'description': '立教大学掲示板へようこそ!!\\n立教大学掲示板では立教大学に関することならなんでも投稿できます!タイトルだけの記事、無駄に空白の多い記事、全く掲示板と関係のない記事は削除対象になりますのでお気をつけください😭自分の気になること、みんなにシェアしたい内容などを投稿して掲示板を盛り上げよう!', 'subscriptionCount': 84, 'subscribed': False, 'read': False, 'createdAt': '2021-01-29T01:04:09.768Z', 'updatedAt': '2021-04-09T11:40:21.329Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 
'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/587426ea-4e13-4897-8d16-d8e82bf4f8d7/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d28fe6c1-3c22-4c41-83b5-1864946bb470/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 20}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3fc54182-dc8a-4008-82ba-a12ac2e09a8c', 'alias': 'got7', 'name': 'GOT7', 'description': '歡迎各位在這裡討論跟GOT7有關的一切、一起交流哦~\\n也歡迎路人粉、音粉甚至是還不認識GOT7的朋友們一起來了解我們帥氣又多才多藝的7位寶藏男孩哦❤️', 'subscriptionCount': 8283, 'subscribed': False, 'read': False, 'createdAt': '2021-01-30T09:56:22.676Z', 'updatedAt': '2021-03-02T03:17:27.752Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發表文章前,可選擇分類,讓瀏覽者能夠更一目了然哦~', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'video', 'image', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/a37c2c4b-0898-43a1-a94a-2a3c71612bea/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1485ff77-546c-475d-890f-5d04fb4024cf/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': 
{'last30Days': 59}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '57baf81e-59da-465a-88b6-fe6d43f01c2a', 'alias': 'nycu', 'name': '陽明交通大學', 'description': '陽明交通大學板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,舉凡課程資訊、教授教學評價,又或是學校活動,只要是你想要知道的資訊,都能在校板中迅速獲得解答!', 'subscriptionCount': 2589, 'subscribed': False, 'read': False, 'createdAt': '2021-02-02T07:28:46.097Z', 'updatedAt': '2021-02-02T10:02:47.115Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': True, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'link'], 'postCount': {'last30Days': 44}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '65c97d05-4696-4c49-8906-acb6398ba317', 'alias': 'clubhouse', 'name': 'Clubhouse', 'description': '語音社交平台 Clubhouse 專屬討論交流板,你都聽或是開哪些房間?快來分享!', 'subscriptionCount': 8380, 'subscribed': False, 'read': False, 'createdAt': '2021-02-04T14:19:25.224Z', 'updatedAt': '2021-02-04T14:45:41.126Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/1fe4b1bc-8c76-4d3f-b4b2-b73619c232c0/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/25601f75-4825-453b-93ce-086a8480a3ea/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 4}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6e906a60-b8c9-401d-9183-f0bd376791b2', 'alias': 'kebuke', 'name': '可不可真心話', 'description': '', 'subscriptionCount': 6734, 'subscribed': False, 'read': False, 'createdAt': '2021-02-17T07:25:01.859Z', 'updatedAt': '2021-03-31T11:04:23.476Z', 'canPost': False, 'ignorePost': True, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/132f375e-0740-495f-ab47-1258df79dc1b/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/441c5c5f-e579-41bb-8f09-0131e87df6d8/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 119}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8e349c6d-4459-4e26-b967-430d0ebf843c', 'alias': 'world_flipper', 'name': '彈射世界', 'description': '《彈射世界》玩法以「彈珠台」為基礎,加入冒險劇情、角色培養、戰鬥系統等 RPG 元素,來享受快節奏打擊感吧!', 'subscriptionCount': 617, 'subscribed': False, 'read': False, 'createdAt': '2021-02-17T08:01:18.094Z', 'updatedAt': '2021-02-22T03:38:51.601Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': 
[], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/687888f3-ce0c-42eb-8dc6-2efb8820cd75/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/61562559-70eb-4839-9ece-ce938456c329/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'f0b871c6-ad91-41b4-8e98-0d6b3ebf2844', 'alias': 'shinee', 'name': 'SHINee', 'description': '5늘처럼2렇게5래보자♥️\\n我們就像今天一樣一起長長久久的走下去吧!💎', 'subscriptionCount': 6131, 'subscribed': False, 'read': False, 'createdAt': '2021-03-08T03:44:41.998Z', 'updatedAt': '2021-04-20T08:36:41.827Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['SHINee', '溫流', '鐘鉉', 'KEY', '珉豪', '泰民'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/18687493-f643-43a1-8e7b-8abce17991ad/orig.jpeg', 'type': 'image/jpeg', 'width': 720, 'height': 240}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6f7b7827-59cc-41c6-a396-b5da8f04efc4/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 19}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0a24ebc1-46f1-4a0a-8699-bbac5ac69abd', 'alias': 'men_dressup', 'name': '男生穿搭', 'description': '提供男生穿搭討論的小天地,任何男性時尚潮流相關話題也可以在此發文詢問!', 'subscriptionCount': 46450, 'subscribed': False, 'read': False, 'createdAt': 
'2021-04-08T07:47:31.246Z', 'updatedAt': '2021-04-08T08:07:52.434Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'video', 'image', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7ad6835c-caf8-4aba-8122-c52688acb13c/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e631e2b1-1f74-429f-a9ba-3f883adf3f82/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 88}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '63e3b3ee-dc29-4aa9-80b7-def40ccf7715', 'alias': 'epic_war_thrones', 'name': '鴻圖之下', 'description': '《鴻圖之下》看板提供玩家們討論戰術攻略、情報分享、聯盟交友、玩家問答總匯等鴻圖之下相關話題。', 'subscriptionCount': 120, 'subscribed': False, 'read': False, 'createdAt': '2021-04-13T08:58:30.139Z', 'updatedAt': '2021-04-14T04:11:34.276Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'video', 'image', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d75658a5-f708-446c-aa8c-31a8eb93101f/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/ce466bb8-13f0-4145-81b5-66020b95b57a/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 1}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '2f8c3fce-d9d3-4c20-aeae-efd96e4ed5fb', 'alias': 'indie_game', 'name': '獨立遊戲', 'description': '任何跟獨立遊戲相關的議題,都歡迎在這個板上與大家分享及討論。', 'subscriptionCount': 1186, 'subscribed': False, 'read': False, 'createdAt': '2021-05-17T05:22:36.828Z', 'updatedAt': '2021-05-17T05:44:27.689Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d07dbfd2-6a02-4f73-a85c-ae72797b3bb0/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5120b344-a231-4ecd-9ea8-85d281c9810e/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 20}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '0e68aa75-7b2d-4586-b74b-fdaf6fbe94f5', 'alias': 'sex_game', 'name': '西斯遊戲', 'description': '透過遊戲,感受性與愛的美好,本版開放討論含有性議題的相關遊戲,發文前記得詳閱板規。', 'subscriptionCount': 1424, 'subscribed': False, 'read': False, 'createdAt': '2021-05-17T05:31:43.488Z', 'updatedAt': '2021-05-17T05:43:51.330Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 
'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/6a3c0a9e-7c6f-4dd7-9a3e-c3dbdb9cdead/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d81e4513-82a5-4769-aa98-5fa4decbf323/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 10}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '18bfea65-824d-4b32-b209-f82a3bb4caa4', 'alias': 'stayhome', 'name': '防疫生活', 'description': '歡迎分享你的居家防疫日記,遠距上班上課發生什麼趣事、居家運動菜單、躺床一整天追劇清單等等,疫情嚴峻的時刻,Dcard 陪你一起待在家!', 'subscriptionCount': 18900, 'subscribed': False, 'read': False, 'createdAt': '2021-05-21T06:00:16.712Z', 'updatedAt': '2021-06-29T07:17:35.198Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['不見面在一起', '居家防疫', '防疫'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/02f09f16-350b-499f-8a9f-adfce84563a0/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/16a75f37-8cd1-4fd2-b049-76256906029d/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 460}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'aa511662-26c6-4de6-96e4-8b0014449110', 'alias': 'apex_legends', 'name': 'APEX 英雄', 'description': '本板為《APEX 英雄》板,討論主旨為一切與《APEX 英雄》有關之事物與話題。', 
'subscriptionCount': 3515, 'subscribed': False, 'read': False, 'createdAt': '2021-05-28T06:22:59.474Z', 'updatedAt': '2021-07-12T04:50:22.800Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9276308c-9fb9-4e16-84c0-d2fbbcad2d00/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a7ee947c-9234-4985-af50-673a4f099326/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 188}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '09514be5-22d6-4946-b35f-e015678e9b5e', 'alias': 'cookie_run_kingdom', 'name': '薑餅人王國', 'description': '拜託幫我看版規拜託🥺\\n本板為《薑餅人王國》板,討論主旨為與《薑餅人王國》有關之事物與話題,大家可以多多交流喔!', 'subscriptionCount': 2142, 'subscribed': False, 'read': False, 'createdAt': '2021-05-28T06:23:47.015Z', 'updatedAt': '2021-07-12T04:50:17.323Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['餅乾', '公會', '薑餅人', '組隊'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/db584765-d885-401f-8800-fb8c09e451d5/full.jpeg', 'type': 
'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ce9429c0-fcde-4f5e-9da9-59bb1e0aa25d/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 261}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'de8420b3-cb1f-438e-b373-cf18d7a7f2fe', 'alias': 'monster_hunter', 'name': '魔物獵人', 'description': '發文前麻煩先選擇標題分類喔!\\n未選擇分類一律刪文+禁言一天處理!', 'subscriptionCount': 2413, 'subscribed': False, 'read': False, 'createdAt': '2021-05-28T06:24:32.242Z', 'updatedAt': '2021-07-16T11:55:51.938Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5aff410a-2306-4faa-b368-95454db6df2d/orig.jpeg', 'type': 'image/jpeg', 'width': 1080, 'height': 360}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/8643c243-6d8e-4ca2-b602-2f2c08112751/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 33}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '5e5e5bd5-8214-488f-8e3a-12ab90e37c3c', 'alias': 'sponsored', 'name': '贊助', 'description': '', 'subscriptionCount': 67, 'subscribed': False, 'read': False, 'createdAt': '2021-05-31T06:52:36.123Z', 'updatedAt': '2021-06-10T06:47:41.577Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 
'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic'], 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ae0980f4-f161-41ab-9e8f-d276470011a4/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 11}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '19ab5226-9d84-4d5a-a730-59643b65a9d9', 'alias': 'show_goods', 'name': '曬好物', 'description': '📋 暖心提醒:\\n- 歡迎分享自己在好物購物後的實際使用心得!\\n- 如果分享的內容與商品為🔞,記得勾選「標記為🔞」唷!\\n- 分享內容要包含至少 1 張商品照片及 30 字,並請標記曾經購買的好物商品!\\n- 為了維持分享品質,曬好物文章發出後即不能刪除、修改標記商品,但隨時想編輯文章都可以唷!', 'subscriptionCount': 1173, 'subscribed': False, 'read': False, 'createdAt': '2021-06-02T07:12:42.072Z', 'updatedAt': '2021-06-16T13:57:58.056Z', 'canPost': False, 'ignorePost': False, 'invisible': True, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['ecSharing'], 'postCount': {'last30Days': 138}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '401afe74-7a71-481d-a2b9-420ce383d3a5', 'alias': 'show_sexgoods', 'name': '曬西斯好物', 'description': '📋 暖心提醒:\\n- 歡迎分享自己在好物購入西斯玩具後的實際使用心得!\\n- 如有裸露照片,記得打好馬賽克再上傳,避免違規遭到刪除🚫\\n- 分享內容要包含至少 1 張商品照片及 30 字,並請標記曾經購買的好物商品!\\n- 為了維持分享品質,曬好物文章發出後即不能刪除、修改標記商品,但隨時想編輯文章都可以唷!', 'subscriptionCount': 10, 'subscribed': False, 'read': False, 'createdAt': '2021-06-02T07:15:42.935Z', 'updatedAt': '2021-06-16T13:58:13.523Z', 'canPost': False, 'ignorePost': True, 'invisible': True, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 
'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': True, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['ecSharing'], 'postCount': {'last30Days': 9}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3f077677-d1fd-4e93-ad7a-5a4685bfa2f3', 'alias': 'jujutsu_kaisen', 'name': '咒術迴戰', 'description': \"歡迎來到咒術迴戰板 (֊'ヮ'֊)\\n\\n在這邊可以進行動漫畫、周邊、活動等討論與分享。\\n✏ 發文時請詳閱板規,並按照類別使用 # 做分類,讓板友可以方便搜尋。\\n和平相處、理性討論。\", 'subscriptionCount': 5198, 'subscribed': False, 'read': False, 'createdAt': '2021-06-04T05:21:54.443Z', 'updatedAt': '2021-06-23T13:53:50.086Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '標題如有其他分類可自行添加\\n(๑•̀ᄇ•́)و✧', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/c5254ae7-694c-484b-aa6c-d3e6ddebafe7/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c568bacd-4134-4254-ba36-0cdf71ff17f5/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 57}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'e69178a2-533a-43f2-8401-1b819b7102bf', 'alias': 'attack_on_titan', 'name': '進擊的巨人', 'description': '歡迎各位多多分享有關「進擊的巨人」的資訊及劇情討論!也請務必要遵守板規,給大家一個優良的交流空間。', 'subscriptionCount': 4072, 'subscribed': False, 'read': False, 'createdAt': '2021-06-10T03:02:17.959Z', 'updatedAt': '2021-06-16T16:54:44.059Z', 'canPost': False, 'ignorePost': False, 
'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['進擊的巨人', '漫畫', '動畫'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/d7a71958-5566-42c1-afa2-2f457aba81ad/orig.jpeg', 'type': 'image/jpeg', 'width': 1181, 'height': 393}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/06342c1f-979d-4ee3-9348-98086da8a5cd/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 60}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '3a23e13c-919f-4088-8c5c-a6a69a62de99', 'alias': 'tower_of_saviors', 'name': '神魔之塔', 'description': '本板為《神魔之塔》板,討論主旨為一切與《神魔之塔》有關之事物與話題。', 'subscriptionCount': 762, 'subscribed': False, 'read': False, 'createdAt': '2021-06-11T13:55:22.026Z', 'updatedAt': '2021-07-22T07:10:11.110Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/53900d9b-dcf4-4a4d-85d3-4c28ddd50612/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/ca6df3bc-da4e-4359-9823-bcbcc7645c7f/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': 
{'last30Days': 40}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'df1b5e5c-f158-4f08-bff9-8a9822f7793a', 'alias': 'monster_strike', 'name': '怪物彈珠', 'description': '本板為《怪物彈珠》板,討論主旨為一切與《怪物彈珠》有關之事物與話題。', 'subscriptionCount': 510, 'subscribed': False, 'read': False, 'createdAt': '2021-06-11T13:56:01.035Z', 'updatedAt': '2021-06-11T13:59:32.106Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/516d59d5-b4d5-4c9e-a409-516fc9f7228b/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/5cb75938-f300-44a7-b1f2-d9fb5cc279a8/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 9}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6abae5c6-33c2-463d-9629-d4432662c9ab', 'alias': 'rent', 'name': '租屋', 'description': '這裡是租屋板,歡迎討論租屋的大小事。發文前請詳閱版規,務必詳閱版規 。', 'subscriptionCount': 6336, 'subscribed': False, 'read': False, 'createdAt': '2021-06-17T02:34:05.102Z', 'updatedAt': '2021-07-22T04:34:59.769Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文請於前方加上地區。 如租屋疑問(台北)。', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['台北', '租屋糾紛', '台中', '套房', '台中租屋', '房東', '房客', '出租', '合租'], 'nsfw': False, 
'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bb285585-be6a-46b3-9244-f4fe0453c113/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d5a65e10-a3a8-4f98-86ca-705856af661d/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 351}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '5bced431-fdc2-49d5-acc6-9cc41c7e6532', 'alias': 'kr_drama', 'name': '韓劇', 'description': '親朋好友照過來,一起來分享韓劇的美好吧!\\n讓還沒入坑的朋友跳進大坑,也讓已經深陷韓劇泥潭的朋友脫離片荒的苦惱~\\n希望大家能一起推廣韓劇的美好嘍🇰🇷🇰🇷', 'subscriptionCount': 10744, 'subscribed': False, 'read': False, 'createdAt': '2021-06-17T02:43:42.208Z', 'updatedAt': '2021-06-17T03:42:41.125Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/9f9fb76b-a07a-4156-90a4-37ec9f55feda/orig.jpeg', 'type': 'image/jpeg', 'width': 380, 'height': 127}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/02f78db4-9f4f-44e0-ae46-f8b8d7612cf5/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 127}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'af1a2923-2b26-4fe1-927d-d3304616d709', 'alias': 'hkmacboy', 'name': '港澳男生', 'description': '屬於港澳男仔嘅討論區,呢到只限男仔可以po文,女仔係不能留言的!\\n要注意唔可以PO色情內容\\n歡迎大家係到討關於男仔既問題~請注意!發文字數不得少於15個字', 'subscriptionCount': 1084, 'subscribed': False, 'read': False, 
'createdAt': '2021-06-23T04:25:20.818Z', 'updatedAt': '2021-06-24T16:23:05.245Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': True, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/f6dba9d7-7d68-461e-b6a3-2a58c597b3fe/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/2408233e-c4d4-439e-ab55-0ce0fc98a19c/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 42}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '508d842d-eb50-4847-a63d-df9f5a06ec47', 'alias': 'hkmacentertainer', 'name': '港澳追星', 'description': '屬於港澳人嘅追星板!快啲分享下你最鐘意嘅明星同埋關於佢地嘅事啦!請注意!發文字數不得少於15個字', 'subscriptionCount': 647, 'subscribed': False, 'read': False, 'createdAt': '2021-06-23T04:28:33.091Z', 'updatedAt': '2021-07-12T04:52:46.212Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {'include': []}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3976a96d-98c7-4a71-9fe5-f925eb5d1aba/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 
'https://megapx-assets.dcard.tw/images/74df2f77-0e24-4adf-923f-80e9917fbcb7/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 46}, 'favorite': False, 'enablePrivateMessage': True}, {'id': 'e44b7e81-ff65-4f8c-8c9b-1d411526655b', 'alias': 'ragnarokx_nextgeneration', 'name': 'RO:新世代的誕生', 'description': 'ROX看板提供卡友們討論攻略、情報分享、遊戲心得跟詢問RO\\n\\n仙境傳說:新世代的誕生之相關問題!', 'subscriptionCount': 92, 'subscribed': False, 'read': False, 'createdAt': '2021-06-24T06:12:55.144Z', 'updatedAt': '2021-06-24T06:17:20.796Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/7a078bc4-2102-4298-8ceb-265ab299c219/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/038e9349-2d51-44de-a5b5-f10f9cd99c24/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 24}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c8a16de0-b76f-450e-b0b0-29081c27495c', 'alias': 'play_together', 'name': '天天玩樂園', 'description': '歡迎來到《天天玩樂園》板,祝大家好運連連,一起釣大魚吧٩(˃̶͈̀௰˂̶͈́)و', 'subscriptionCount': 2228, 'subscribed': False, 'read': False, 'createdAt': '2021-06-25T09:27:38.356Z', 'updatedAt': '2021-07-22T07:10:16.449Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 
'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['遊戲', '釣魚', '手機遊戲'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/78ca0d97-797a-4a7f-8e75-b31b1f58fdb8/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/6f43bd10-1158-46b4-ba82-f297dfe7ce59/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 143}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '495d9eec-0689-44c6-9341-2b96af6bee7a', 'alias': 'dead_by_daylight', 'name': '黎明死線', 'description': '本板為黎明死線(Dead By Daylight)板,討論主旨為一切與黎明死線(Dead By Daylight)有關之事物與話題。', 'subscriptionCount': 241, 'subscribed': False, 'read': False, 'createdAt': '2021-06-25T09:28:55.547Z', 'updatedAt': '2021-07-22T07:10:13.978Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/8c5aeaa7-628f-4f4e-9578-cd1d7518abe4/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/e1f405dd-65bb-4b3b-a021-4e3dc625a733/full.jpeg', 'type': 'image/png', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 7}, 'favorite': False, 'enablePrivateMessage': True}, {'id': '0c16b7c5-1c70-4923-9175-ff8041ab2d52', 'alias': 'yoga', 'name': '瑜珈', 'description': 
'給熱愛瑜珈、對瑜珈有興趣的同好們,我們一起在相同興趣下努力吧。\\n不論瑜珈上的問題或是對瑜珈的想法,\\n都非常歡迎好夥伴們發文哦~\\n希望好夥伴們可以有個舒適的空間哦!\\n也要記得遵守板規規範,\\n最後祝大家在瑜珈的路上開開心心~', 'subscriptionCount': 1979, 'subscribed': False, 'read': False, 'createdAt': '2021-06-30T08:46:41.541Z', 'updatedAt': '2021-06-30T09:57:26.256Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '🧘🏻\\u200d♀️標題前面請分類:#資訊、#心得、#問題、#揪團、#閒聊\\n🧘🏻\\u200d♀️請加上話題:#哈達瑜珈、#熱瑜珈、#陰瑜珈、#空中瑜珈、#艾揚格瑜珈、#阿斯湯珈瑜珈、#流瑜珈、#居家瑜珈、#晨間瑜珈等\\n🧘🏻\\u200d♀️請加上標題:維持體態美、容易上手、新手可以嘗試等\\n🧘🏻\\u200d♀️標題範例:\\n#閒聊 #居家瑜珈 防疫期間也要做瑜珈\\n#心得 #流瑜珈 塑造我美好體態', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/cb047fe6-3f44-45a5-9eed-50d4d572fc9f/orig.jpeg', 'type': 'image/jpeg', 'width': 728, 'height': 242}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/d05a54ab-9ee7-4efd-a125-31f3149adf0b/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 17}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '9f2e8e2f-10f4-415f-870b-eaaeb01a9505', 'alias': 'luxury_watch', 'name': '精品錶', 'description': '供討論、分享、詢問與精品錶相關的話題。請大家理性溝通、和平相處喔!', 'subscriptionCount': 2148, 'subscribed': False, 'read': False, 'createdAt': '2021-06-30T09:04:50.969Z', 'updatedAt': '2021-07-13T09:17:04.550Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 
'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/df66dfd5-8c23-4069-b8a0-686748fa9111/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/cdf88109-f640-4728-adee-8cb858c24bf8/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 57}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '339e5449-5b42-48cb-b425-3ffcb845babe', 'alias': 'tattoo', 'name': '刺青', 'description': '', 'subscriptionCount': 2392, 'subscribed': False, 'read': False, 'createdAt': '2021-07-06T07:18:35.667Z', 'updatedAt': '2021-07-06T14:09:37.928Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '發文開頭需先放上標題喔 ex. 
#日式傳統', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/3c1cf8c8-fc34-4793-82c1-07e0aa47a785/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/1ea606ab-cc8d-4f70-b619-eb7c7259a719/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 58}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '8118cda0-37d5-48c9-85f1-71e088b5e8f1', 'alias': 'jpop', 'name': 'JPOP', 'description': '無論找歌、分享或是發布cover影片通通歡迎(*´³`*)\\nJPOP板將提供給每位日音飯最完善的討論空間❤️❤️', 'subscriptionCount': 1475, 'subscribed': False, 'read': False, 'createdAt': '2021-07-06T07:25:13.770Z', 'updatedAt': '2021-07-15T07:25:18.286Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '請大家儘量加上標題(ex.#找歌),這樣才能夠獲得更精確、快速的回應或幫助唷!', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['JPOP', '日文歌', '找歌', '分享'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/bd62e5d8-867c-4d34-bb89-b707c9040748/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c0e0dd41-4fa0-4890-bd0b-e47d0957a7db/full.jpeg', 'type': 'image/jpeg', 'width': 199, 'height': 199}, 'postCount': {'last30Days': 53}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '879d66df-5755-4d88-92c9-656893243067', 'alias': 'gfriend', 'name': 'GFRIEND', 'description': '둘 셋!안녕하세요 
여자친구입니다!\\U0001f90d💙💜\\n二 三!大家好!\\n歡迎來到Dcard GFRIEND板!\\U0001f90d💙💜', 'subscriptionCount': 1560, 'subscribed': False, 'read': False, 'createdAt': '2021-07-08T05:43:33.946Z', 'updatedAt': '2021-07-22T16:19:27.038Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': True, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': ['GFRIEND', 'Sowon', 'Yerin', 'Eunha', 'Yuju', 'SinB', 'Umji'], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/434459bc-192f-4ebf-9573-f30e87bff8f6/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/c8111011-f78c-477f-9545-c146b88b2a4f/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 38}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '84809ec4-193a-4088-bf88-3660f5330163', 'alias': 'love_of_unknown', 'name': '未生逆行', 'description': '歡迎大家來到《未生逆行》板,希望各位小主播能在這裡交流,互相交換心動資訊呦σ ゚∀ ゚) ゚∀゚)σ', 'subscriptionCount': 132, 'subscribed': False, 'read': False, 'createdAt': '2021-07-09T03:38:05.681Z', 'updatedAt': '2021-07-09T03:40:30.704Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 
'https://megapx-assets.dcard.tw/images/209e73a3-be5c-4322-bfd1-de1f8d675cb5/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/3173ae9e-04df-4b0b-96e0-74200ad2825f/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 46}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'd7938942-0bfb-41a6-abaf-829932ba1057', 'alias': 'china_star', 'name': '中國明星', 'description': '歡迎光臨中國明星板ヾ(・ω・*)ノ\\n詳細閱讀板規後你可以盡情分享、討論或是詢問相關資訊。\\n希望卡友能在這裡渡過快樂時光ฅ^•ﻌ•^ฅ', 'subscriptionCount': 1088, 'subscribed': False, 'read': False, 'createdAt': '2021-07-13T06:05:23.153Z', 'updatedAt': '2021-07-13T09:20:40.125Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/957cb22a-286e-41c6-9820-78ebbcfb244c/orig.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/4d1b07c9-14e8-4228-8369-752fd9a8e2be/orig.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 16}, 'favorite': False, 'enablePrivateMessage': False}, {'id': '6ae7cfd6-7d16-42cc-a040-5e6a8e711899', 'alias': 'genshin', 'name': '原神', 'description': '本板為《原神》板,討論主旨為一切與《原神》有關之事物與話題。', 'subscriptionCount': 282, 'subscribed': False, 'read': False, 'createdAt': '2021-07-16T04:13:22.850Z', 'updatedAt': '2021-07-16T04:26:19.951Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': 
True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/5553f002-4fb1-4dec-8673-62e9b184a5cd/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/a264e66d-5ac3-4ccb-aa7c-540e0372c847/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 9}, 'favorite': False, 'enablePrivateMessage': False}, {'id': 'c0c98884-d13c-4cdd-85e6-b48c85fdfa8b', 'alias': 'otome_game', 'name': '女性向遊戲', 'description': '本板為女性向遊戲板,討論主旨為一切與女性向遊戲有關之事物與話題。', 'subscriptionCount': 710, 'subscribed': False, 'read': False, 'createdAt': '2021-07-16T04:13:54.009Z', 'updatedAt': '2021-07-16T04:25:36.680Z', 'canPost': False, 'ignorePost': False, 'invisible': False, 'isSchool': False, 'fullyAnonymous': False, 'canUseNickname': True, 'postThumbnail': {'size': 'small'}, 'shouldCategorized': False, 'shouldPostCategorized': False, 'hasPostCategories': False, 'titlePlaceholder': '', 'postTitlePlaceholder': '', 'ipCountryCondition': {}, 'subcategories': [], 'topics': [], 'nsfw': False, 'mediaThreshold': {}, 'limitCountries': [], 'limitStage': 0, 'availableLayouts': ['classic', 'image', 'video', 'link'], 'heroImage': {'url': 'https://megapx-assets.dcard.tw/images/b988a13a-6e6a-4ba7-b8c7-495cecb52cc7/full.jpeg', 'type': 'image/jpeg', 'width': 1800, 'height': 600}, 'logo': {'url': 'https://megapx-assets.dcard.tw/images/9565f435-1196-4a43-97e2-41198e587924/full.jpeg', 'type': 'image/jpeg', 'width': 200, 'height': 200}, 'postCount': {'last30Days': 12}, 'favorite': False, 'enablePrivateMessage': False}]\n" ], [ 
"df=pd.DataFrame(data)\ndf=df.sort_values(by=['subscriptionCount'], ascending = False)\ndf.head(5)\n\ndf.to_csv('./20210412_Dcaed.csv', index = False)\nprint(df)", " alias availableLayouts canPost \\\n373 sex [classic] False \n228 relationship [classic] False \n224 dressup [classic, link] False \n217 makeup [classic] False \n233 meme [image] False \n273 food [classic, link, image, video] False \n270 horoscopes [classic] False \n230 talk [classic, link] False \n346 trending [classic, link] False \n340 money [classic, link] False \n231 funny [classic, link] False \n234 girl [classic, link] False \n229 mood [classic] False \n212 youtuber [classic, link] False \n287 netflix [classic, link] False \n261 pet [classic, link] False \n327 fitness [classic] False \n328 weight_loss [classic, link] False \n447 stock [classic, link] False \n347 job [classic] False \n257 house [classic, link] False \n342 savemoney [classic, link] False \n274 cooking [classic, link] False \n284 movie [classic, link] False \n232 joke [classic, link] False \n283 travel [classic, link, image, video] False \n238 rainbow [classic, link] False \n338 apple [classic, link] False \n226 buyonline [classic, link] False \n307 acg [classic] False \n.. ... ... ... 
\n469 jp_meiji [classic] False \n510 love_of_unknown [classic, image, video, link] False \n484 epic_war_thrones [classic, video, image, link] False \n380 hksponsored [classic] False \n379 test_hk [classic] False \n465 jp_univoftokyo [classic] False \n466 jp_agu [classic] False \n468 jp_keio [classic] False \n502 ragnarokx_nextgeneration [classic, image, video, link] False \n474 jp_sophia [classic] False \n476 jp_rikkyo [classic] False \n473 jp_hosei [classic] False \n470 jp_chuo [classic] False \n491 sponsored [classic] False \n472 jp_gakushuin [classic] False \n97 delete [classic] False \n471 jp_hitotsubashi [classic] False \n475 jp_ochanomizu [classic] False \n169 info [classic] False \n5 bugreport [classic] False \n493 show_sexgoods [ecSharing] False \n419 nba_test [classic] False \n381 dcardaddemo [classic] False \n168 infotest [classic] False \n172 athlete [classic, link] False \n207 hkbeauty [classic, image, video, link] False \n208 hktrending [classic, image, video, link] False \n182 hkmacdaily [classic, image, video, link] False \n159 mkc [classic, link] False \n209 hkacg [classic, image, video, link] False \n\n canUseNickname createdAt \\\n373 True 2020-02-04T07:52:53.573Z \n228 True 2020-02-04T07:28:43.573Z \n224 True 2020-02-04T07:28:03.573Z \n217 True 2020-02-04T07:26:53.573Z \n233 True 2020-02-04T07:29:33.573Z \n273 True 2020-02-04T07:36:13.573Z \n270 True 2020-02-04T07:35:43.573Z \n230 True 2020-02-04T07:29:03.573Z \n346 True 2020-02-04T07:48:23.573Z \n340 True 2020-02-04T07:47:23.573Z \n231 True 2020-02-04T07:29:13.573Z \n234 True 2020-02-04T07:29:43.573Z \n229 True 2020-02-04T07:28:53.573Z \n212 True 2020-02-04T07:26:03.573Z \n287 True 2020-02-04T07:38:33.573Z \n261 True 2020-02-04T07:34:13.573Z \n327 True 2020-02-04T07:45:13.573Z \n328 True 2020-02-04T07:45:23.573Z \n447 True 2020-11-19T09:17:31.052Z \n347 True 2020-02-04T07:48:33.573Z \n257 True 2020-02-04T07:33:33.573Z \n342 True 2020-02-04T07:47:43.573Z \n274 True 2020-02-04T07:36:23.573Z \n284 
True 2020-02-04T07:38:03.573Z \n232 True 2020-02-04T07:29:23.573Z \n283 True 2020-02-04T07:37:53.573Z \n238 True 2020-02-04T07:30:23.573Z \n338 True 2020-02-04T07:47:03.573Z \n226 True 2020-02-04T07:28:23.573Z \n307 True 2020-02-04T07:41:53.573Z \n.. ... ... \n469 True 2021-01-29T00:40:42.548Z \n510 True 2021-07-09T03:38:05.681Z \n484 True 2021-04-13T08:58:30.139Z \n380 True 2020-03-05T04:28:23.785Z \n379 True 2020-02-25T10:01:27.581Z \n465 True 2021-01-29T00:14:12.269Z \n466 True 2021-01-29T00:30:34.293Z \n468 False 2021-01-29T00:35:34.922Z \n502 True 2021-06-24T06:12:55.144Z \n474 True 2021-01-29T00:49:40.367Z \n476 True 2021-01-29T01:04:09.768Z \n473 True 2021-01-29T00:48:04.122Z \n470 True 2021-01-29T00:42:23.942Z \n491 True 2021-05-31T06:52:36.123Z \n472 True 2021-01-29T00:46:07.776Z \n97 True 2016-05-23T02:15:15.879Z \n471 True 2021-01-29T00:44:16.800Z \n475 True 2021-01-29T00:53:43.579Z \n169 True 2017-02-25T06:52:03.772Z \n5 True 2016-05-18T07:20:35.140Z \n493 True 2021-06-02T07:15:42.935Z \n419 True 2020-07-29T16:27:34.696Z \n381 True 2020-03-09T04:40:44.327Z \n168 True 2017-02-25T06:52:03.772Z \n172 True 2017-08-22T05:22:03.772Z \n207 True 2020-01-02T03:21:28.406Z \n208 True 2020-01-02T03:22:36.962Z \n182 True 2018-10-03T03:41:18.556Z \n159 True 2016-09-23T09:35:46.370Z \n209 True 2020-01-02T03:23:17.450Z \n\n description enablePrivateMessage \\\n373 西斯板(Sex)提供男女私密話題分享或性教育等情慾議題討論,若有性方面相關問題也可在此發問。... False \n228 無論是遠距離戀愛、情侶間的有趣互動、分手後的藕斷絲連等...都可以在感情板分享你們的愛情故事... False \n224 穿搭板提供各種服裝搭配、包鞋、飾品配件等相關話題討論。\\n歡迎分享自己的日常穿搭,或任何潮流... False \n217 不管你喜歡開架彩妝還是專櫃彩妝,美妝板提供各種最新彩妝開箱評比、粉底色號、唇膏試色、眼影試色... False \n233 梗圖=有梗的圖 False \n273 美食板歡迎分享各種吃貨食記心得,或提供手搖飲料、校園美食、美食情報等文章! False \n270 星座版提供各種星座運勢、心理測驗、星座感情分享,或是有任何塔羅占卜相關的專業知識也可在此發文討論! False \n230 閒聊板提供各種生活周遭大小事的討論,無論是半夜睡不著想找同好,甚至是戴牙套遇到的困擾等...... False \n346 時事板歡迎針對國內外議題、國家政策、即時新聞等討論,也可在此分享時事議題的社論。 False \n340 理財板提供分享各種省錢小撇步、信用卡經驗、虛擬貨幣、股票投資心得等,歡迎你和大家交流各種不錯... False \n231 有趣板歡迎發表任何自己或親友的耍笨事蹟!各種好笑、傻眼、母湯的生活趣事或笑話大全通通都可以在... 
False \n234 專屬女孩的討論版,提供和女生有關的話題討論。也能在這裡匿名分享、抒發、詢問遇到的困擾,就像有... False \n229 提供分享生活情緒、抒發心情或交流各種情緒處理的經歷故事。在這裡你可以安心匿名,用無壓力的書寫... False \n212 只要有手機你就是Youtuber,一起將你的作品分享給全世界吧! False \n287 希望大家能一起創造友善小天地\\n分享我們對於Netflix 的熱愛\\nENJOY❤️\\n**... False \n261 寵物板無論是貓狗、毛小孩或任何養其他寵物的經驗都可以在此討論,另外像是寵物協尋或動物醫院的分... False \n327 請看版規!!!看完歡迎在此發表健身相關話題,例如:重訓技巧、健身飲食、健身房評比、體脂控制等... False \n328 本板供大家討論減肥上的任何問題和困難,互相扶持。\\n減肥的路上常覺得很孤單、路很長,可以上來... False \n447 本板為股票專門討論板,討論內容不侷限台灣股市,貼文必須有股市相關點,並符合板規規範,若貼文內... False \n347 本板提供分享面試經驗、職場心得、打工或實習經驗等相關工作話題。(徵才的職務刊登前請務必詳細閱... False \n257 居家生活板以家或個人空間出發,舉凡室內設計、空間風格、裝潢、what’s in my roo... False \n342 歡迎大家交流各種優惠訊息與省錢方法討論。\\n發文前,記得把標題分類唷! True \n274 歡迎大家分享以下內容:\\n1. 自己的手做料理\\n2. 料理問題提問\\n料理提問請具備足夠條... False \n284 注意:本板嚴禁標題爆雷,內文如有爆雷內容\\n1. 請於標題最前面加上 #有雷\\n2. 請在內... False \n232 歡迎分享各種類型的笑話、梗圖、meme,不管是好笑的、冷場的、能讓人引發思考的,或者是諷刺社... False \n283 旅遊板歡迎分享你的旅行紀錄或是國內外自由行、背包客心得、打工度假、機票購買等經驗,或是有什麼... False \n238 Love Wins!專屬彩虹(LGBT)們的討論板,在這裡可以用最無壓力的方式分享你們的故事。 False \n338 請務必看完版規再發文及討論\\n在此版發文及討論視同同意版規 False \n226 網路購物板主要提供線上購物之經驗分享與網購教學討論。\\n或是在網購前中後遇到問題也能在此發文... False \n307 動漫板提供各種輕小說、動畫討論、新番推薦、公仔模型、同人二創或Cosplay分享,動漫周邊或... False \n.. ... ... \n469 明治大学掲示板へようこそ!!\\n明治大学掲示板では明治大学に関することならなんでも投稿できま... False \n510 歡迎大家來到《未生逆行》板,希望各位小主播能在這裡交流,互相交換心動資訊呦σ ゚∀ ゚) ゚... False \n484 《鴻圖之下》看板提供玩家們討論戰術攻略、情報分享、聯盟交友、玩家問答總匯等鴻圖之下相關話題。 False \n380 Dcard HK 官方提供各項優惠資訊的看板 False \n379 False \n465 東京大学掲示板へようこそ!!\\n東京大学掲示板では東京大学に関することならなんでも投稿できま... False \n466 青学掲示板へようこそ!!\\n青学掲示板では青山学院大学に関することならなんでも投稿できます!... False \n468 慶應大学掲示板へようこそ!!\\n慶應大学掲示板では慶應義塾大学に関することならなんでも投稿で... False \n502 ROX看板提供卡友們討論攻略、情報分享、遊戲心得跟詢問RO\\n\\n仙境傳說:新世代的誕生之相... False \n474 上智大学掲示板へようこそ!!\\n上智大学掲示板では上智大学に関することならなんでも投稿できま... False \n476 立教大学掲示板へようこそ!!\\n立教大学掲示板では立教大学に関することならなんでも投稿できま... False \n473 法政大学掲示板へようこそ!!\\n法政大学掲示板では法政大学に関することならなんでも投稿できま... False \n470 中央大学掲示板へようこそ!!\\n中央大学掲示板では中央大学に関することならなんでも投稿できま... False \n491 False \n472 学習院大学掲示板へようこそ!!\\n学習院大学掲示板では学習院大学に関することならなんでも投稿... 
False \n97 False \n471 一橋大学掲示板へようこそ!!\\n一橋大学掲示板では一橋大学に関することならなんでも投稿できま... False \n475 お茶の水大学掲示板へようこそ!!\\nお茶の水大学掲示板ではお茶の水大学に関することならなんで... False \n169 False \n5 臨時回報版本問題 False \n493 📋 暖心提醒:\\n- 歡迎分享自己在好物購入西斯玩具後的實際使用心得!\\n- 如有裸露照片,... False \n419 False \n381 False \n168 False \n172 False \n207 呢度係比香港澳門嘅同學仔討論化妝、護膚、美髮、任何扮靚相關話題嘅討論區,發文留言前請先閱讀板規 False \n208 呢度係比香港澳門嘅同學仔討論同港澳有關既時事議題嘅討論區,發文留言前請先閱讀板規 False \n182 專屬於香港澳門o既討論區,日常生活大小事都可以係度傾~發文請注意需超過15個中文字 False \n159 馬偕醫護管理專科學校板,一個能讓你暢所欲言的地方。在這裡,卡友們可以盡情討論校園裡的大小事,... False \n209 呢度係比香港澳門既同學仔討論同分享各種動漫、遊戲嘅討論區,發文留言前請先閱讀板規 True \n\n favorite fullyAnonymous hasPostCategories ... \\\n373 False True False ... \n228 False True False ... \n224 False False False ... \n217 False False True ... \n233 False False False ... \n273 False False False ... \n270 False True False ... \n230 False False False ... \n346 False False False ... \n340 False False False ... \n231 False False False ... \n234 False True False ... \n229 False True False ... \n212 False False False ... \n287 False False False ... \n261 False False False ... \n327 False False False ... \n328 False False False ... \n447 False False True ... \n347 False True False ... \n257 False False False ... \n342 False False True ... \n274 False False True ... \n284 False False False ... \n232 False False False ... \n283 False False False ... \n238 False True False ... \n338 False False True ... \n226 False False False ... \n307 False False False ... \n.. ... ... ... ... \n469 False False False ... \n510 False False False ... \n484 False False False ... \n380 False False False ... \n379 False False False ... \n465 False False False ... \n466 False False False ... \n468 False False False ... \n502 False False False ... \n474 False False False ... \n476 False False False ... \n473 False False False ... \n470 False False False ... \n491 False False False ... \n472 False False False ... \n97 False False False ... \n471 False False False ... \n475 False False False ... \n169 False False False ... 
\n5 False False False ... \n493 False True False ... \n419 False False False ... \n381 False False False ... \n168 False False False ... \n172 False False False ... \n207 False False False ... \n208 False False False ... \n182 False False False ... \n159 False False False ... \n209 False False False ... \n\n postTitlePlaceholder read \\\n373 False \n228 False \n224 False \n217 發文前請選擇標題分類,提高文章曝光度喔!❤️ False \n233 False \n273 False \n270 False \n230 False \n346 False \n340 False \n231 False \n234 False \n229 False \n212 請善用搜尋功能,不要發表相同或類似文章。\\n\\n發文前請先仔細閱讀板規,若因違反板規而被刪文... False \n287 False \n261 False \n327 發文前看版規,發轉讓會籍、教練課相關貼文會永久禁言哦 False \n328 False \n447 ⚠️發文請記得選分類⚠️\\n❗️選了分類就不用再重複輸入❗️\\n#標的 #新聞 #分享 #請... False \n347 False \n257 False \n342 發文前,先選擇↑標題分類 False \n274 發文前請先瞭解版規規定喔 False \n284 False \n232 False \n283 False \n238 False \n338 請務必看完版規再發文\\n發文視同同意版規 False \n226 False \n307 False \n.. ... ... \n469 False \n510 False \n484 False \n380 False \n379 False \n465 False \n466 False \n468 False \n502 False \n474 False \n476 False \n473 False \n470 False \n491 False \n472 False \n97 False \n471 False \n475 False \n169 False \n5 False \n493 False \n419 False \n381 False \n168 False \n172 False \n207 False \n208 False \n182 False \n159 False \n209 False \n\n shouldCategorized shouldPostCategorized \\\n373 False False \n228 False False \n224 False False \n217 False False \n233 False False \n273 True False \n270 True False \n230 False False \n346 False False \n340 True False \n231 False False \n234 False False \n229 False False \n212 False False \n287 False False \n261 True False \n327 True False \n328 False False \n447 False False \n347 True False \n257 True False \n342 False False \n274 False False \n284 True False \n232 False False \n283 True False \n238 False False \n338 False False \n226 True False \n307 True False \n.. ... ... 
\n469 False False \n510 False False \n484 False False \n380 False False \n379 False False \n465 False False \n466 False False \n468 False False \n502 False False \n474 False False \n476 False False \n473 False False \n470 False False \n491 False False \n472 False False \n97 False False \n471 False False \n475 False False \n169 False False \n5 False False \n493 False False \n419 False False \n381 False False \n168 False False \n172 False False \n207 False False \n208 False False \n182 False False \n159 False False \n209 False False \n\n subcategories subscribed \\\n373 [創作, 知識, 圖文] False \n228 [曖昧, 閃光, 劈腿, 失戀, 分手, 告白] False \n224 [精選, 日常, 正式, 情侶, 鞋款] False \n217 [精選, 底妝, 眼妝, 唇彩, 保養, 情報] False \n233 [] False \n273 [精選, 食譜, 食記, 評比, 超商] False \n270 [占卜, 心理測驗, 白羊, 金牛, 雙子, 巨蟹, 獅子, 處女, 天秤, 天蠍, 射手,... False \n230 [醫療, 法律] False \n346 [新聞, 討論, 爆料, 社論] False \n340 [請益, 虛擬貨幣, 基金, 股票期貨, 保險, 匯率] False \n231 [] False \n234 [購物, 髮型, 心事] False \n229 [] False \n212 [] False \n287 [] False \n261 [精選, 協尋, 狗, 貓, 小動物, 爬蟲, 水族] False \n327 [精選] False \n328 [] False \n447 [] False \n347 [精選, 徵才, 經驗分享, 職業介紹, 勞工權益] False \n257 [] False \n342 [] False \n274 [] False \n284 [精選, 情報, 電影, 臺灣, 韓國, 歐美, 日本, 中國] False \n232 [] False \n283 [精選, 臺灣, 日韓, 亞洲, 歐美] False \n238 [心情, 議題] False \n338 [] False \n226 [教學, 發問, 集運, 心得] False \n307 [精選, 情報, 心得, 推坑, 同人, COS] False \n.. ... ... \n469 [] False \n510 [] False \n484 [] False \n380 [] False \n379 [] False \n465 [] False \n466 [] False \n468 [] False \n502 [] False \n474 [] False \n476 [] False \n473 [] False \n470 [] False \n491 [] False \n472 [] False \n97 [] False \n471 [] False \n475 [] False \n169 [] False \n5 [] False \n493 [] False \n419 [] False \n381 [] False \n168 [] False \n172 [] False \n207 [] False \n208 [] False \n182 [] False \n159 [課程評價] False \n209 [] False \n\n subscriptionCount titlePlaceholder \\\n373 609171 發文請記得在下一頁加入話題或其他相關分類喲! \n228 550783 發文記得加入「話題」分類喲! \n224 534417 發文記得加入「話題」分類喲! \n217 450324 發文請記得在下一步驟加入「相關話題」或其他相關分類喲! 
\n233 422567 \n273 383179 發文記得加入「話題」分類喲! \n270 347350 發文記得加入「話題」分類喲! \n230 330688 發文記得加入「話題」分類喲! \n346 326227 發文記得加入「話題」分類喲! \n340 298184 發文記得加入「話題」分類喲! \n231 283557 發文記得加入「話題」分類喲! \n234 269475 發文記得加入「話題」分類喲! \n229 263890 發文記得加入「話題」分類喲!! \n212 260715 \n287 256225 \n261 231298 發文記得加入「話題」分類喲! \n327 227955 發文記得加入「話題」分類喲! \n328 221896 \n447 209699 \n347 205870 發文記得加入「話題」分類喲! \n257 190291 發文記得加入「話題」分類喲! \n342 186877 \n274 182357 \n284 180189 請記得話題加入「電影名稱」或其他相關分類喲! \n232 179987 \n283 177008 發文記得加入「話題」分類喲! \n238 175839 發文記得加入「話題」分類喲! \n338 173547 \n226 169904 發文記得加入「話題」分類喲! \n307 161557 請記得在話題加入「作品名稱」或其他相關分類喲! \n.. ... ... \n469 139 \n510 132 \n484 120 \n380 110 \n379 109 \n465 105 \n466 102 \n468 100 \n502 92 \n474 86 \n476 84 \n473 78 \n470 70 \n491 67 \n472 58 \n97 56 \n471 50 \n475 43 \n169 23 \n5 17 \n493 10 \n419 2 \n381 2 \n168 1 \n172 0 \n207 0 \n208 0 \n182 0 \n159 0 \n209 0 \n\n topics \\\n373 [A片, 甲, Les, 無碼片, NTR, 內射, 自慰, 3P, 外流, 意淫自拍OL黑... \n228 [微西斯, 愛情, 閃光, 價值觀, 告白, 分手, 遠距離, 失戀, 曖昧, 做愛, 在一... \n224 [蝦皮, 耳環, 襯衫, 工裝, 後背包, 寬褲, 淘寶, 涼鞋, 洋裝, 情侶穿搭, 鞋子... \n217 [潔膚水, 防曬, 粉餅, 受受狼, 刷具, 遮瑕, 粉刺, 打亮, 眼影, 粉底, 眉筆,... \n233 [] \n273 [台中美食, 高雄美食, 台南美食, 台北美食, 新竹美食, 板橋美食, 全聯, 711, ... \n270 [心理測驗, 占卜, 雙魚, 射手, 天蠍, 雙子, 巨蟹, 白羊, 金牛, 水瓶, 獅子,... \n230 [網美媽媽, 廢墟探險, 畢旅, 童年回憶, 泰國浴, 租屋, 牙套, 法律, 困擾, 醫療] \n346 [校正回歸] \n340 [信用卡, 基金, 股票期貨, 虛擬貨幣, 匯率, 儲蓄險, 保險, 比特幣, 投資] \n231 [笑話, 梗圖, Wootalk, 愛莉莎莎, 黃金12猛漢, 撩妹, 微西斯, 貼圖, 網... \n234 [心事, 男友, 比基尼, 除毛, WhatsInMyBag, 內衣, 家人, 發胖, 桌布... \n229 [女大十八變, 租屋糾紛, 畢旅, 感動的事, 一句晚安, 想念你, 謝謝你, 靠北, 勵志... \n212 [] \n287 [Netflix, 影集, 美劇, 電影, 推薦, 觀後感] \n261 [領養代替購買, 米克斯, 貓, 狗, 柯基, 柴犬, 認養, 貓咪真的很可愛, 動物醫院,... \n327 [生酮飲食, 減脂, 乳清, 增肌, 健身器材, 健身房, 重訓, 臥推, 熱量, 啞鈴, ... \n328 [飲食, 運動, 勵志] \n447 [] \n347 [面試經驗, 2020聯合校徵, 面試心得, 面試小技巧, 履歷教學, 航空業, Askme... \n257 [WhatsInMyRoom, 居家佈置, 空間風格, 租屋, 室內香氛, 家具, 輕裝潢,... \n342 [優惠, 已兌換, 買一送一, 生日優惠, 折價券, 折扣碼] \n274 [料理, 提問, 廚具, 烹飪, 食譜, 小資料理] \n284 [影評, MARVEL系列, 迪士尼, DC系列, 觀後感, 電影院, 奧斯卡獎, 預告片,... 
\n232 [地獄梗, meme, 梗圖, 冷笑話] \n283 [畢旅, 自由行, 賞楓, 海外志工, 台灣秘境, 臥鋪火車, 獨旅, 飛機餐, 沙發衝浪,... \n238 [微西斯, 高馬尾和長直髮, PPL, 早知道系列, Les, 天菜老師, 總在夜半消失的室... \n338 [AppleLearn, AppleWork, Mac, iPad] \n226 [網購教學, 淘寶, 退貨, 蝦皮, 支付寶, 賣家, 集運運費, 官方集運, 私人集運, ... \n307 [蠟筆小新, 庫洛魔法使, 聲之形, 動漫展, 初音未來, Cosplay, 動漫周邊, 動... \n.. ... \n469 [] \n510 [] \n484 [] \n380 [] \n379 [] \n465 [] \n466 [] \n468 [] \n502 [] \n474 [] \n476 [] \n473 [] \n470 [] \n491 [] \n472 [] \n97 [] \n471 [] \n475 [] \n169 [] \n5 [] \n493 [] \n419 [] \n381 [] \n168 [] \n172 [] \n207 [減肥, 護膚, 打扮, 搽面, 化妝, 分享] \n208 [港聞, 正苦, 林鄭, 時事, 社會, 政治] \n182 [好玩, 港澳板, 生活, 日常] \n159 [] \n209 [電玩, 動漫節, Cosplay, 動漫, ACG, 遊戲] \n\n updatedAt \n373 2021-06-24T04:50:12.522Z \n228 2021-04-20T08:36:40.391Z \n224 2021-04-20T08:36:37.330Z \n217 2021-06-25T07:07:20.485Z \n233 2020-08-31T09:47:51.769Z \n273 2021-04-20T08:36:35.879Z \n270 2021-04-20T08:36:39.207Z \n230 2021-04-20T08:36:35.632Z \n346 2021-05-22T07:01:38.199Z \n340 2021-04-20T08:36:40.750Z \n231 2021-04-20T08:36:37.118Z \n234 2021-04-20T08:36:37.575Z \n229 2021-04-20T08:36:40.189Z \n212 2020-10-07T10:21:01.877Z \n287 2021-05-02T01:11:11.254Z \n261 2021-04-20T08:36:39.170Z \n327 2021-04-20T08:36:39.028Z \n328 2021-04-20T08:36:40.945Z \n447 2020-12-22T18:06:09.500Z \n347 2021-04-20T08:36:37.261Z \n257 2021-04-20T08:36:40.284Z \n342 2021-07-07T06:40:02.753Z \n274 2021-04-20T08:36:38.750Z \n284 2021-04-20T08:36:36.924Z \n232 2021-04-20T08:36:39.292Z \n283 2021-04-20T08:36:39.419Z \n238 2021-04-20T08:36:36.801Z \n338 2021-04-20T08:36:40.086Z \n226 2021-04-20T08:36:41.949Z \n307 2021-04-20T08:36:39.864Z \n.. ... 
\n469 2021-04-09T11:32:05.344Z \n510 2021-07-09T03:40:30.704Z \n484 2021-04-14T04:11:34.276Z \n380 2020-09-16T07:17:36.151Z \n379 2020-02-26T09:11:36.709Z \n465 2021-04-09T11:33:33.374Z \n466 2021-04-09T11:34:13.925Z \n468 2021-04-09T11:32:46.924Z \n502 2021-06-24T06:17:20.796Z \n474 2021-04-09T11:42:13.651Z \n476 2021-04-09T11:40:21.329Z \n473 2021-04-09T11:42:54.571Z \n470 2021-04-09T11:44:59.361Z \n491 2021-06-10T06:47:41.577Z \n472 2021-04-09T11:43:35.677Z \n97 2017-06-18T20:42:53.510Z \n471 2021-04-09T11:44:09.340Z \n475 2021-04-09T11:41:29.760Z \n169 2017-02-25T06:52:03.772Z \n5 2017-06-18T03:31:45.331Z \n493 2021-06-16T13:58:13.523Z \n419 2020-07-29T16:27:34.696Z \n381 2021-07-21T08:50:28.127Z \n168 2018-02-06T17:18:30.699Z \n172 2020-08-13T06:02:20.625Z \n207 2021-04-20T10:15:48.549Z \n208 2021-04-20T10:31:36.975Z \n182 2021-07-16T07:05:15.714Z \n159 2020-08-13T08:58:10.340Z \n209 2021-07-12T04:51:47.346Z \n\n[514 rows x 34 columns]\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ] ]
4aa438d26ea60798d4e88dcc4807cf7303940328
79,673
ipynb
Jupyter Notebook
0802_ResNet_Mel.ipynb
root4kaido/Cornell-Birdcall-Identification
186008965ad7e8797fc181a2836bb63aacb324e4
[ "MIT" ]
1
2020-11-21T12:03:07.000Z
2020-11-21T12:03:07.000Z
0802_ResNet_Mel.ipynb
root4kaido/Cornell-Birdcall-Identification
186008965ad7e8797fc181a2836bb63aacb324e4
[ "MIT" ]
null
null
null
0802_ResNet_Mel.ipynb
root4kaido/Cornell-Birdcall-Identification
186008965ad7e8797fc181a2836bb63aacb324e4
[ "MIT" ]
null
null
null
80.477778
45,080
0.76327
[ [ [ "!pip install torchvision==0.2.2\n!pip install https://download.pytorch.org/whl/cu100/torch-1.1.0-cp36-cp36m-linux_x86_64.whl\n!pip install typing\n!pip install opencv-python\n!pip install slackweb", "Defaulting to user installation because normal site-packages is not writeable\nRequirement already satisfied: torchvision==0.2.2 in /home/user/.local/lib/python3.6/site-packages (0.2.2)\nRequirement already satisfied: tqdm==4.19.9 in /home/user/.local/lib/python3.6/site-packages (from torchvision==0.2.2) (4.19.9)\nRequirement already satisfied: torch in /home/user/.local/lib/python3.6/site-packages (from torchvision==0.2.2) (1.1.0)\nRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torchvision==0.2.2) (1.17.4)\nRequirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision==0.2.2) (7.1.2)\nRequirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from torchvision==0.2.2) (1.11.0)\n\u001b[33mWARNING: You are using pip version 20.1.1; however, version 20.2.1 is available.\nYou should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\nDefaulting to user installation because normal site-packages is not writeable\nRequirement already satisfied: torch==1.1.0 from https://download.pytorch.org/whl/cu100/torch-1.1.0-cp36-cp36m-linux_x86_64.whl in /home/user/.local/lib/python3.6/site-packages (1.1.0)\nRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torch==1.1.0) (1.17.4)\n\u001b[33mWARNING: You are using pip version 20.1.1; however, version 20.2.1 is available.\nYou should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\nDefaulting to user installation because normal site-packages is not writeable\nRequirement already satisfied: typing in /home/user/.local/lib/python3.6/site-packages (3.7.4.3)\n\u001b[33mWARNING: You are using pip version 20.1.1; 
however, version 20.2.1 is available.\nYou should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\nDefaulting to user installation because normal site-packages is not writeable\nRequirement already satisfied: opencv-python in /home/user/.local/lib/python3.6/site-packages (4.3.0.36)\nRequirement already satisfied: numpy>=1.11.3 in /usr/local/lib/python3.6/dist-packages (from opencv-python) (1.17.4)\n\u001b[33mWARNING: You are using pip version 20.1.1; however, version 20.2.1 is available.\nYou should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\nDefaulting to user installation because normal site-packages is not writeable\nRequirement already satisfied: slackweb in /home/user/.local/lib/python3.6/site-packages (1.0.5)\n\u001b[33mWARNING: You are using pip version 20.1.1; however, version 20.2.1 is available.\nYou should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n" ], [ "!pip list | grep torchvision\n!pip list | grep torch", "torchvision 0.2.2\n\u001b[33mWARNING: You are using pip version 20.1.1; however, version 20.2.1 is available.\nYou should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\ntorch 1.1.0\ntorchvision 0.2.2\n\u001b[33mWARNING: You are using pip version 20.1.1; however, version 20.2.1 is available.\nYou should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n" ], [ "# import cv2\nimport audioread\nimport logging\nimport os\nimport random\nimport time\nimport warnings\nimport glob\nfrom tqdm import tqdm\n\nimport librosa\nimport numpy as np\nimport pandas as pd\nimport soundfile as sf\nimport torch\nimport torch.optim as optim\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.utils.data as data\n\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom typing import Optional\n\nfrom 
fastprogress import progress_bar\nfrom sklearn.metrics import f1_score\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.model_selection import train_test_split\nfrom torchvision import models\n\nimport matplotlib.pyplot as plt\n\nimport slackweb", "_____no_output_____" ], [ "def set_seed(seed: int = 42):\n random.seed(seed)\n np.random.seed(seed)\n os.environ[\"PYTHONHASHSEED\"] = str(seed)\n torch.manual_seed(seed)\n torch.cuda.manual_seed(seed) # type: ignore\n torch.backends.cudnn.deterministic = True # type: ignore\n torch.backends.cudnn.benchmark = True # type: ignore\n \n \ndef get_logger(out_file=None):\n logger = logging.getLogger()\n formatter = logging.Formatter(\"%(asctime)s - %(levelname)s - %(message)s\")\n logger.handlers = []\n logger.setLevel(logging.INFO)\n\n handler = logging.StreamHandler()\n handler.setFormatter(formatter)\n handler.setLevel(logging.INFO)\n logger.addHandler(handler)\n\n if out_file is not None:\n fh = logging.FileHandler(out_file)\n fh.setFormatter(formatter)\n fh.setLevel(logging.INFO)\n logger.addHandler(fh)\n logger.info(\"logger set up\")\n return logger\n \n \n@contextmanager\ndef timer(name: str, logger: Optional[logging.Logger] = None):\n t0 = time.time()\n msg = f\"[{name}] start\"\n if logger is None:\n print(msg)\n else:\n logger.info(msg)\n yield\n\n msg = f\"[{name}] done in {time.time() - t0:.2f} s\"\n if logger is None:\n print(msg)\n else:\n logger.info(msg)", "_____no_output_____" ], [ "logger = get_logger(\"main.log\")\nset_seed(1213)", "2020-08-08 23:21:47,138 - INFO - logger set up\n" ], [ "DATA_PATH = '/home/knikaido/work/Cornell-Birdcall-Identification/data/birdsong_recognition/'\nTRAIN_PATH = DATA_PATH + 'train_audio/'\nMEL_PATH = '/home/knikaido/work/Cornell-Birdcall-Identification/data/feature/08_06_melspectrogram_small/'", "_____no_output_____" ], [ "\n", "_____no_output_____" ], [ "class ResNet(nn.Module):\n def __init__(self, base_model_name: str, pretrained=False,\n num_classes=264):\n 
super().__init__()\n base_model = models.__getattribute__(base_model_name)(\n pretrained=pretrained)\n layers = list(base_model.children())[:-2]\n layers.append(nn.AdaptiveMaxPool2d(1))\n self.encoder = nn.Sequential(*layers)\n\n in_features = base_model.fc.in_features\n\n self.classifier = nn.Sequential(\n nn.Linear(in_features, 1024), nn.ReLU(), nn.Dropout(p=0.2),\n nn.Linear(1024, 1024), nn.ReLU(), nn.Dropout(p=0.2),\n nn.Linear(1024, num_classes))\n\n def forward(self, x):\n batch_size = x.size(0)\n x = self.encoder(x).view(batch_size, -1)\n x = self.classifier(x)\n multiclass_proba = F.softmax(x, dim=1)\n multilabel_proba = torch.sigmoid(x)\n return {\n \"logits\": x,\n \"multiclass_proba\": multiclass_proba,\n \"multilabel_proba\": multilabel_proba\n }", "_____no_output_____" ], [ "model_config = {\n \"base_model_name\": \"resnet50\",\n \"pretrained\": False,\n \"num_classes\": 264\n}", "_____no_output_____" ], [ "BIRD_CODE = {\n 'aldfly': 0, 'ameavo': 1, 'amebit': 2, 'amecro': 3, 'amegfi': 4,\n 'amekes': 5, 'amepip': 6, 'amered': 7, 'amerob': 8, 'amewig': 9,\n 'amewoo': 10, 'amtspa': 11, 'annhum': 12, 'astfly': 13, 'baisan': 14,\n 'baleag': 15, 'balori': 16, 'banswa': 17, 'barswa': 18, 'bawwar': 19,\n 'belkin1': 20, 'belspa2': 21, 'bewwre': 22, 'bkbcuc': 23, 'bkbmag1': 24,\n 'bkbwar': 25, 'bkcchi': 26, 'bkchum': 27, 'bkhgro': 28, 'bkpwar': 29,\n 'bktspa': 30, 'blkpho': 31, 'blugrb1': 32, 'blujay': 33, 'bnhcow': 34,\n 'boboli': 35, 'bongul': 36, 'brdowl': 37, 'brebla': 38, 'brespa': 39,\n 'brncre': 40, 'brnthr': 41, 'brthum': 42, 'brwhaw': 43, 'btbwar': 44,\n 'btnwar': 45, 'btywar': 46, 'buffle': 47, 'buggna': 48, 'buhvir': 49,\n 'bulori': 50, 'bushti': 51, 'buwtea': 52, 'buwwar': 53, 'cacwre': 54,\n 'calgul': 55, 'calqua': 56, 'camwar': 57, 'cangoo': 58, 'canwar': 59,\n 'canwre': 60, 'carwre': 61, 'casfin': 62, 'caster1': 63, 'casvir': 64,\n 'cedwax': 65, 'chispa': 66, 'chiswi': 67, 'chswar': 68, 'chukar': 69,\n 'clanut': 70, 'cliswa': 71, 'comgol': 72, 
'comgra': 73, 'comloo': 74,\n 'commer': 75, 'comnig': 76, 'comrav': 77, 'comred': 78, 'comter': 79,\n 'comyel': 80, 'coohaw': 81, 'coshum': 82, 'cowscj1': 83, 'daejun': 84,\n 'doccor': 85, 'dowwoo': 86, 'dusfly': 87, 'eargre': 88, 'easblu': 89,\n 'easkin': 90, 'easmea': 91, 'easpho': 92, 'eastow': 93, 'eawpew': 94,\n 'eucdov': 95, 'eursta': 96, 'evegro': 97, 'fiespa': 98, 'fiscro': 99,\n 'foxspa': 100, 'gadwal': 101, 'gcrfin': 102, 'gnttow': 103, 'gnwtea': 104,\n 'gockin': 105, 'gocspa': 106, 'goleag': 107, 'grbher3': 108, 'grcfly': 109,\n 'greegr': 110, 'greroa': 111, 'greyel': 112, 'grhowl': 113, 'grnher': 114,\n 'grtgra': 115, 'grycat': 116, 'gryfly': 117, 'haiwoo': 118, 'hamfly': 119,\n 'hergul': 120, 'herthr': 121, 'hoomer': 122, 'hoowar': 123, 'horgre': 124,\n 'horlar': 125, 'houfin': 126, 'houspa': 127, 'houwre': 128, 'indbun': 129,\n 'juntit1': 130, 'killde': 131, 'labwoo': 132, 'larspa': 133, 'lazbun': 134,\n 'leabit': 135, 'leafly': 136, 'leasan': 137, 'lecthr': 138, 'lesgol': 139,\n 'lesnig': 140, 'lesyel': 141, 'lewwoo': 142, 'linspa': 143, 'lobcur': 144,\n 'lobdow': 145, 'logshr': 146, 'lotduc': 147, 'louwat': 148, 'macwar': 149,\n 'magwar': 150, 'mallar3': 151, 'marwre': 152, 'merlin': 153, 'moublu': 154,\n 'mouchi': 155, 'moudov': 156, 'norcar': 157, 'norfli': 158, 'norhar2': 159,\n 'normoc': 160, 'norpar': 161, 'norpin': 162, 'norsho': 163, 'norwat': 164,\n 'nrwswa': 165, 'nutwoo': 166, 'olsfly': 167, 'orcwar': 168, 'osprey': 169,\n 'ovenbi1': 170, 'palwar': 171, 'pasfly': 172, 'pecsan': 173, 'perfal': 174,\n 'phaino': 175, 'pibgre': 176, 'pilwoo': 177, 'pingro': 178, 'pinjay': 179,\n 'pinsis': 180, 'pinwar': 181, 'plsvir': 182, 'prawar': 183, 'purfin': 184,\n 'pygnut': 185, 'rebmer': 186, 'rebnut': 187, 'rebsap': 188, 'rebwoo': 189,\n 'redcro': 190, 'redhea': 191, 'reevir1': 192, 'renpha': 193, 'reshaw': 194,\n 'rethaw': 195, 'rewbla': 196, 'ribgul': 197, 'rinduc': 198, 'robgro': 199,\n 'rocpig': 200, 'rocwre': 201, 'rthhum': 202, 'ruckin': 203, 
'rudduc': 204,\n 'rufgro': 205, 'rufhum': 206, 'rusbla': 207, 'sagspa1': 208, 'sagthr': 209,\n 'savspa': 210, 'saypho': 211, 'scatan': 212, 'scoori': 213, 'semplo': 214,\n 'semsan': 215, 'sheowl': 216, 'shshaw': 217, 'snobun': 218, 'snogoo': 219,\n 'solsan': 220, 'sonspa': 221, 'sora': 222, 'sposan': 223, 'spotow': 224,\n 'stejay': 225, 'swahaw': 226, 'swaspa': 227, 'swathr': 228, 'treswa': 229,\n 'truswa': 230, 'tuftit': 231, 'tunswa': 232, 'veery': 233, 'vesspa': 234,\n 'vigswa': 235, 'warvir': 236, 'wesblu': 237, 'wesgre': 238, 'weskin': 239,\n 'wesmea': 240, 'wessan': 241, 'westan': 242, 'wewpew': 243, 'whbnut': 244,\n 'whcspa': 245, 'whfibi': 246, 'whtspa': 247, 'whtswi': 248, 'wilfly': 249,\n 'wilsni1': 250, 'wiltur': 251, 'winwre3': 252, 'wlswar': 253, 'wooduc': 254,\n 'wooscj2': 255, 'woothr': 256, 'y00475': 257, 'yebfly': 258, 'yebsap': 259,\n 'yehbla': 260, 'yelwar': 261, 'yerwar': 262, 'yetvir': 263\n}\n\nINV_BIRD_CODE = {v: k for k, v in BIRD_CODE.items()}", "_____no_output_____" ], [ "train_path = DATA_PATH + 'train.csv'\ntrain = pd.read_csv(train_path)\n\nle = LabelEncoder()\nencoded = le.fit_transform(train['channels'].values)\ndecoded = le.inverse_transform(encoded)\ntrain['channels'] = encoded\n\nfor i in tqdm(range(len(train))):\n train['ebird_code'][i] = BIRD_CODE[train['ebird_code'][i]]\n \ntrain['filename'] = train['filename'].str.replace(\".mp3\", \"\")\n \ntrain.head()", " 0%| | 0/21375 [00:00<?, ?it/s]/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:10: SettingWithCopyWarning: \nA value is trying to be set on a copy of a slice from a DataFrame\n\nSee the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n # Remove the CWD from sys.path while we load stuff.\n100%|██████████| 21375/21375 [00:02<00:00, 9086.35it/s]\n" ], [ "mel_list = sorted(glob.glob(MEL_PATH + '*.npy'))\nmel_list = pd.Series(mel_list)\nlen(mel_list)", "_____no_output_____" ], [ 
"import joblib\ntarget_list = joblib.load(MEL_PATH+'target_list.pkl')\nfor i in tqdm(range(len(target_list))):\n target_list[i] = BIRD_CODE[target_list[i]]\nlen(target_list)", "100%|██████████| 455060/455060 [00:00<00:00, 2799960.07it/s]\n" ], [ "X_train_mel, X_valid_mel, target_train, taret_valid = train_test_split(mel_list, target_list, test_size=0.2, stratify=target_list)", "_____no_output_____" ], [ "class TrainDateset(torch.utils.data.Dataset):\n def __init__(self, mel_list, train, transform=None):\n self.transform = transform\n self.mel_list = mel_list\n self.data_num = len(mel_list)\n\n def __len__(self):\n return self.data_num\n\n def __getitem__(self, idx):\n if self.transform:\n pass\n# out_data = self.transform(self.data)[0][idx]\n# out_label = self.label[idx]\n else:\n# print(idx)\n out_data = np.array(np.load(mel_list[idx]))\n out_mel_list = mel_list[idx]\n out_label = target_list[idx]\n \n# out_label = self.label[idx]\n\n return out_data, out_label", "_____no_output_____" ], [ "train_dataset = TrainDateset(X_train_mel, target_train)\ntrain_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=128, shuffle=True)\n\nvalid_dataset = TrainDateset(X_valid_mel, taret_valid)\nvalid_dataloader = torch.utils.data.DataLoader(valid_dataset, batch_size=128, shuffle=True)", "_____no_output_____" ], [ "\n", "_____no_output_____" ], [ "\nWEIGHT_DECAY = 0.005\nLEARNING_RATE = 0.0001\nEPOCH = 100", "_____no_output_____" ], [ "device = 'cuda' if torch.cuda.is_available() else 'cpu'\nprint(torch.cuda.is_available())", "True\n" ], [ "net = ResNet('resnet50')\nnet = net.to(device)\ncriterion = nn.CrossEntropyLoss()\noptimizer = optim.Adam(net.parameters(), lr=LEARNING_RATE, weight_decay=WEIGHT_DECAY)", "_____no_output_____" ], [ "%%time\ntrain_losses = []\nvalid_losses = []\n\nfor epoch in tqdm(range(EPOCH)): # loop over the dataset multiple times\n\n train_loss = 0.0\n valid_loss = 0.0\n net.train()\n \n for i, data in enumerate(train_dataloader):\n # 
第二引数は,スタート位置で,0なのでenumerate(trainloader)と同じ\n # https://docs.python.org/3/library/functions.html#enumerate\n\n # get the inputs\n inputs, labels = data\n inputs = inputs.to(device)\n labels = labels.to(device)\n\n # wrap them in Variable\n# inputs, labels = Variable(inputs), Variable(labels)\n\n # zero the parameter gradients\n optimizer.zero_grad()\n\n # forward + backward + optimize\n outputs = net(inputs)\n loss = criterion(outputs['logits'], labels)\n loss.backward()\n optimizer.step()\n\n # print statistics\n# running_loss += loss.data[0]\n train_loss += loss.to('cpu').detach().numpy().copy()\n print('[%d, %5d] train loss: %.3f' %\n (epoch + 1, i + 1, train_loss / (i+1)))\n train_losses.append(train_loss / (i+1))\n\n net.eval()\n \n for i, data in enumerate(valid_dataloader):\n # 第二引数は,スタート位置で,0なのでenumerate(trainloader)と同じ\n # https://docs.python.org/3/library/functions.html#enumerate\n\n # get the inputs\n inputs, labels = data\n inputs = inputs.to(device)\n labels = labels.to(device)\n\n # wrap them in Variable\n# inputs, labels = Variable(inputs), Variable(labels)\n\n # zero the parameter gradients\n optimizer.zero_grad()\n\n # forward + backward + optimize\n outputs = net(inputs)\n loss = criterion(outputs['logits'], labels)\n\n # print statistics\n# running_loss += loss.data[0]\n valid_loss += loss.to('cpu').detach().numpy().copy()\n print('[%d, %5d] valid loss: %.3f' %\n (epoch + 1, i + 1, valid_loss / (i+1)))\n valid_losses.append(valid_loss / (i+1))\n \n# break", " 0%| | 0/100 [00:00<?, ?it/s]" ] ], [ [ "https://qiita.com/derodero24/items/f4cc46f144f404054501", "_____no_output_____" ] ], [ [ "import cloudpickle", "_____no_output_____" ], [ "with open('model.pkl', 'wb') as f:\n cloudpickle.dump(net, f)", "_____no_output_____" ], [ "slack = slackweb.Slack(url=\"https://hooks.slack.com/services/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\")\nslack.notify(text=\"おわた\")", "_____no_output_____" ], [ "with open('model.pkl', 'rb') as f:\n net = 
cloudpickle.load(f)", "_____no_output_____" ] ], [ [ "## plot loss", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(16,5), dpi= 80)\nplt.plot(train_losses, color='tab:red', label='valid')\nplt.plot(valid_losses, color='tab:blue', label='train')\nplt.legend()", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
4aa439f2c4141e7019907dffe187d285874d51d7
201,477
ipynb
Jupyter Notebook
Course - 1: Introduction to Data Science in Python/resources/week-3/PandasIdioms_ed.ipynb
Skharwa1/Applied-Data-Science-with-Python-Specialization
c1778f2f265b75613718f4339c1527f9b42c96b0
[ "MIT" ]
9
2021-10-03T08:17:06.000Z
2022-03-24T10:41:10.000Z
Course - 1: Introduction to Data Science in Python/resources/week-3/PandasIdioms_ed.ipynb
Skharwa1/Applied-Data-Science-with-Python-Specialization
c1778f2f265b75613718f4339c1527f9b42c96b0
[ "MIT" ]
null
null
null
Course - 1: Introduction to Data Science in Python/resources/week-3/PandasIdioms_ed.ipynb
Skharwa1/Applied-Data-Science-with-Python-Specialization
c1778f2f265b75613718f4339c1527f9b42c96b0
[ "MIT" ]
22
2021-02-10T06:01:00.000Z
2022-03-21T11:50:42.000Z
35.748226
119
0.328276
[ [ [ "Python programmers will often suggest that there many ways the language can be used to solve a particular \nproblem. But that some are more appropriate than others. The best solutions are celebrated as Idiomatic\nPython and there are lots of great examples of this on StackOverflow and other websites.\n\nA sort of sub-language within Python, Pandas has its own set of idioms. We've alluded to some of these \nalready, such as using vectorization whenever possible, and not using iterative loops if you don't need to. \nSeveral developers and users within the Panda's community have used the term __pandorable__ for these\nidioms. I think it's a great term. So, I wanted to share with you a couple of key features of how you can\nmake your code pandorable.", "_____no_output_____" ] ], [ [ "# Let's start by bringing in our data processing libraries\nimport pandas as pd\nimport numpy as np\n# And we'll bring in some timing functionality too, from the timeit module\nimport timeit\n\n# And lets look at some census data from the US\ndf = pd.read_csv('datasets/census.csv')\ndf.head()", "_____no_output_____" ], [ "# The first of the pandas idioms I would like to talk about is called method chaining. The general idea behind\n# method chaining is that every method on an object returns a reference to that object. The beauty of this is\n# that you can condense many different operations on a DataFrame, for instance, into one line or at least one\n# statement of code.\n\n# Here's the pandorable way to write code with method chaining. In this code I'm going to pull out the state\n# and city names as a multiple index, and I'm going to do so only for data which has a summary level of 50,\n# which in this dataset is county-level data. 
I'll rename a column too, just to make it a bit more readable.\n(df.where(df['SUMLEV']==50)\n .dropna()\n .set_index(['STNAME','CTYNAME'])\n .rename(columns={'ESTIMATESBASE2010': 'Estimates Base 2010'}))", "_____no_output_____" ], [ "# Lets walk through this. First, we use the where() function on the dataframe and pass in a boolean mask which\n# is only true for those rows where the SUMLEV is equal to 50. This indicates in our source data that the data\n# is summarized at the county level. With the result of the where() function evaluated, we drop missing\n# values. Remember that .where() doesn't drop missing values by default. Then we set an index on the result of\n# that. In this case I've set it to the state name followed by the county name. Finally. I rename a column to\n# make it more readable. Note that instead of writing this all on one line, as I could have done, I began the\n# statement with a parenthesis, which tells python I'm going to span the statement over multiple lines for\n# readability.", "_____no_output_____" ], [ "# Here's a more traditional, non-pandorable way, of writing this. There's nothing wrong with this code in the\n# functional sense, you might even be able to understand it better as a new person to the language. It's just\n# not as pandorable as the first example.\n\n# First create a new dataframe from the original\ndf = df[df['SUMLEV']==50] # I'll use the overloaded indexing operator [] which drops nans\n# Update the dataframe to have a new index, we use inplace=True to do this in place\ndf.set_index(['STNAME','CTYNAME'], inplace=True)\n# Set the column names\ndf.rename(columns={'ESTIMATESBASE2010': 'Estimates Base 2010'})", "_____no_output_____" ], [ "# Now, the key with any good idiom is to understand when it isn't helping you. 
In this case, you can actually\n# time both methods and see which one runs faster\n\n# We can put the approach into a function and pass the function into the timeit function to count the time the\n# parameter number allows us to choose how many times we want to run the function. Here we will just set it to\n# 10\n\n# Lets write a wrapper for our first function\ndef first_approach():\n global df\n # And we'll just paste our code right here\n return (df.where(df['SUMLEV']==50)\n .dropna()\n .set_index(['STNAME','CTYNAME'])\n .rename(columns={'ESTIMATESBASE2010': 'Estimates Base 2010'}))\n\n# Read in our dataset anew\ndf = pd.read_csv('datasets/census.csv')\n\n# And now lets run it\ntimeit.timeit(first_approach, number=10)", "_____no_output_____" ], [ "# Now let's test the second approach. As you may notice, we use our global variable df in the function.\n# However, changing a global variable inside a function will modify the variable even in a global scope and we\n# do not want that to happen in this case. Therefore, for selecting summary levels of 50 only, I create a new\n# dataframe for those records\n\n# Let's run this for once and see how fast it is\ndef second_approach():\n global df\n new_df = df[df['SUMLEV']==50]\n new_df.set_index(['STNAME','CTYNAME'], inplace=True)\n return new_df.rename(columns={'ESTIMATESBASE2010': 'Estimates Base 2010'})\n\n# Read in our dataset anew\ndf = pd.read_csv('datasets/census.csv')\n\n# And now lets run it\ntimeit.timeit(second_approach, number=10)", "_____no_output_____" ], [ "# As you can see, the second approach is much faster! So, this is a particular example of a classic time\n# readability trade off.\n\n# You'll see lots of examples on stack overflow and in documentation of people using method chaining in their\n# pandas. And so, I think being able to read and understand the syntax is really worth your time. 
But keep in\n# mind that following what appears to be stylistic idioms might have performance issues that you need to\n# consider as well.", "_____no_output_____" ], [ "# Here's another pandas idiom. Python has a wonderful function called map, which is sort of a basis for\n# functional programming in the language. When you want to use map in Python, you pass it some function you\n# want called, and some iterable, like a list, that you want the function to be applied to. The results are\n# that the function is called against each item in the list, and there's a resulting list of all of the\n# evaluations of that function.\n\n# Pandas has a similar function called applymap. In applymap, you provide some function which should operate\n# on each cell of a DataFrame, and the return set is itself a DataFrame. Now I think applymap is fine, but I\n# actually rarely use it. Instead, I find myself often wanting to map across all of the rows in a DataFrame.\n# And pandas has a function that I use heavily there, called apply. Let's look at an example.", "_____no_output_____" ], [ "# Let's take a look at our census DataFrame. In this DataFrame, we have five columns for population estimates,\n# with each column corresponding with one year of estimates. It's quite reasonable to want to create some new\n# columns for minimum or maximum values, and the apply function is an easy way to do this.\n\n# First, we need to write a function which takes in a particular row of data, finds a minimum and maximum\n# values, and returns a new row of data nd returns a new row of data. We'll call this function min_max, this\n# is pretty straight forward. 
We can create some small slice of a row by projecting the population columns.\n# Then use the NumPy min and max functions, and create a new series with a label values represent the new\n# values we want to apply.\n\ndef min_max(row):\n data = row[['POPESTIMATE2010',\n 'POPESTIMATE2011',\n 'POPESTIMATE2012',\n 'POPESTIMATE2013',\n 'POPESTIMATE2014',\n 'POPESTIMATE2015']]\n return pd.Series({'min': np.min(data), 'max': np.max(data)})", "_____no_output_____" ], [ "# Then we just need to call apply on the DataFrame.\n\n# Apply takes the function and the axis on which to operate as parameters. Now, we have to be a bit careful,\n# we've talked about axis zero being the rows of the DataFrame in the past. But this parameter is really the\n# parameter of the index to use. So, to apply across all rows, which is applying on all columns, you pass axis\n# equal to 'columns'.\ndf.apply(min_max, axis='columns').head()", "_____no_output_____" ], [ "# Of course there's no need to limit yourself to returning a new series object. If you're doing this as part\n# of data cleaning your likely to find yourself wanting to add new data to the existing DataFrame. In that\n# case you just take the row values and add in new columns indicating the max and minimum scores. 
This is a\n# regular part of my workflow when bringing in data and building summary or descriptive statistics, and is\n# often used heavily with the merging of DataFrames.", "_____no_output_____" ], [ "# Here's an example where we have a revised version of the function min_max Instead of returning a separate\n# series to display the min and max we add two new columns in the original dataframe to store min and max\n\ndef min_max(row):\n data = row[['POPESTIMATE2010',\n 'POPESTIMATE2011',\n 'POPESTIMATE2012',\n 'POPESTIMATE2013',\n 'POPESTIMATE2014',\n 'POPESTIMATE2015']]\n # Create a new entry for max\n row['max'] = np.max(data)\n # Create a new entry for min\n row['min'] = np.min(data)\n return row\n# Now just apply the function across the dataframe\ndf.apply(min_max, axis='columns')", "_____no_output_____" ], [ "# Apply is an extremely important tool in your toolkit. The reason I introduced apply here is because you\n# rarely see it used with large function definitions, like we did. Instead, you typically see it used with\n# lambdas. To get the most of the discussions you'll see online, you're going to need to know how to at least\n# read lambdas.\n\n# Here's You can imagine how you might chain several apply calls with lambdas together to create a readable\n# yet succinct data manipulation script. One line example of how you might calculate the max of the columns\n# using the apply function.\nrows = ['POPESTIMATE2010', 'POPESTIMATE2011', 'POPESTIMATE2012', 'POPESTIMATE2013','POPESTIMATE2014', \n 'POPESTIMATE2015']\n# Now we'll just apply this across the dataframe with a lambda\ndf.apply(lambda x: np.max(x[rows]), axis=1).head()", "_____no_output_____" ], [ "# If you don't remember lambdas just pause the video for a moment and look up the syntax. 
A lambda is just an\n# unnamed function in python, in this case it takes a single parameter, x, and returns a single value, in this\n# case the maximum over all columns associated with row x.", "_____no_output_____" ], [ "# The beauty of the apply function is that it allows flexibility in doing whatever manipulation that you\n# desire, as the function you pass into apply can be any customized however you want. Let's say we want to\n# divide the states into four categories: Northeast, Midwest, South, and West We can write a customized\n# function that returns the region based on the state the state regions information is obtained from Wikipedia\n\ndef get_state_region(x):\n northeast = ['Connecticut', 'Maine', 'Massachusetts', 'New Hampshire', \n 'Rhode Island','Vermont','New York','New Jersey','Pennsylvania']\n midwest = ['Illinois','Indiana','Michigan','Ohio','Wisconsin','Iowa',\n 'Kansas','Minnesota','Missouri','Nebraska','North Dakota',\n 'South Dakota']\n south = ['Delaware','Florida','Georgia','Maryland','North Carolina',\n 'South Carolina','Virginia','District of Columbia','West Virginia',\n 'Alabama','Kentucky','Mississippi','Tennessee','Arkansas',\n 'Louisiana','Oklahoma','Texas']\n west = ['Arizona','Colorado','Idaho','Montana','Nevada','New Mexico','Utah',\n 'Wyoming','Alaska','California','Hawaii','Oregon','Washington']\n \n if x in northeast:\n return \"Northeast\"\n elif x in midwest:\n return \"Midwest\"\n elif x in south:\n return \"South\"\n else:\n return \"West\"", "_____no_output_____" ], [ "# Now we have the customized function, let's say we want to create a new column called Region, which shows the\n# state's region, we can use the customized function and the apply function to do so. The customized function\n# is supposed to work on the state name column STNAME. 
So we will set the apply function on the state name\n# column and pass the customized function into the apply function\ndf['state_region'] = df['STNAME'].apply(lambda x: get_state_region(x))", "_____no_output_____" ], [ "# Now let's see the results\ndf[['STNAME','state_region']].head()", "_____no_output_____" ] ], [ [ "So there are a couple of Pandas idioms. But I think there's many more, and I haven't talked about them here.\nSo here's an unofficial assignment for you. Go look at some of the top ranked questions on pandas on Stack \nOverflow, and look at how some of the more experienced authors, answer those questions. Do you see any \ninteresting patterns? Feel free to share them with myself and others in the class.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
4aa43ae939a747285f91e2c2b7067b6616b432ad
16,607
ipynb
Jupyter Notebook
NLTK/Yumin/analysis.ipynb
VincentCheng34/StudyOnPython
b3f905b2e77f6ccfdce675bb2596e6ac708859a7
[ "MIT" ]
1
2019-05-01T06:29:14.000Z
2019-05-01T06:29:14.000Z
NLTK/Yumin/analysis.ipynb
VincentCheng34/StudyOnPython
b3f905b2e77f6ccfdce675bb2596e6ac708859a7
[ "MIT" ]
null
null
null
NLTK/Yumin/analysis.ipynb
VincentCheng34/StudyOnPython
b3f905b2e77f6ccfdce675bb2596e6ac708859a7
[ "MIT" ]
null
null
null
26.152756
99
0.37713
[ [ [ "# 讀取字典", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport os", "_____no_output_____" ], [ "filepath = '/Volumes/backup_128G/z_repository/Yumin_data/玉敏_俄羅斯課本的研究'\n\nfile_dic = '華語八千詞(內含注音字型檔)/Chinese_8000W_20190515_v1.xlsx'\nbook_file = '實用漢語教科書2010_生詞表.xlsx'\nto_file = 'processed/chinese_8000Words_results.xlsx'\n\n# write_level_doc = '{0}/{1}'.format(filepath, to_level_doc)\nread_dic = '{0}/{1}'.format(filepath, file_dic)\nread_book = '{0}/{1}'.format(filepath, book_file)\nwrite_file = '{0}/{1}'.format(filepath, to_file)", "_____no_output_____" ], [ "dicDf = pd.DataFrame()\n\nwith pd.ExcelFile(read_dic) as reader:\n # read sheet by sheet\n for sheet in reader.sheet_names:\n# print(sheet)\n sheetDf = pd.read_excel(reader, sheet, header=None)\n sheetDf = sheetDf.fillna(0)\n\n dicDf = dicDf.append(sheetDf, ignore_index=True)\n\n# change to lowercase\nlen(dicDf.index)", "_____no_output_____" ], [ "dicDf.head()", "_____no_output_____" ], [ "dicList = {}\nfor idx in range(0, len(dicDf)):\n row = dicDf.loc[idx]\n \n dicWord = row[0]\n dicLevel = row[1]\n\n if dicWord not in dicList:\n dicList[dicWord] = [dicLevel]\n else:\n# print(dicWord, dicLevel)\n dicList[dicWord].append(dicLevel)\n# dicList", "_____no_output_____" ] ], [ [ "# 讀取待分析檔", "_____no_output_____" ] ], [ [ "bookDf = pd.read_excel(read_book)", "_____no_output_____" ], [ "bookDf.head()", "_____no_output_____" ], [ "wordDifferentLevel = []\n\ndef wordLevel(word):\n foundLevel = 9\n if word in dicList:\n foundLevel = dicList[word][0]\n \n return foundLevel", "_____no_output_____" ], [ "levelList = []\n\nfor idx in range(0, len(bookDf)):\n row = bookDf.loc[idx]\n \n chapter = row[0]\n wtype = row[1]\n word = row[3]\n \n level = wordLevel(word)\n levelList.append([word, level, wtype, chapter])\n# print(chapter, wtype, word)", "_____no_output_____" ], [ "levelDf = pd.DataFrame(levelList)\nlevelDf = levelDf.sort_values(by=[1, 3, 2, 0])\nlevelDf.head()", "_____no_output_____" ], 
[ "# levelDf.loc[levelDf[2] == 'A']", "_____no_output_____" ], [ "# levelDf.loc[levelDf[2] == 'B']", "_____no_output_____" ], [ "levelDf[~levelDf[2].isin(['A', 'B'])]", "_____no_output_____" ], [ "def statsLevel(INdf):\n levelCountList = []\n for level in range(1, 10):\n levelCount = INdf[1].loc[INdf[1] == level].count()\n levelCountList.append(levelCount)\n\n levelCountDf = pd.DataFrame(levelCountList)\n return levelCountDf", "_____no_output_____" ], [ "def statsLessonLevel(INdf):\n levels = list(range(1, 10))\n statDf = pd.DataFrame(levels)\n \n lessons = INdf[3].unique()\n lessons = np.sort(lessons)\n for lesson in lessons:\n lessonDf = INdf.loc[INdf[3] == lesson]\n statDf[lesson] = statsLevel(lessonDf)\n\n return statDf", "_____no_output_____" ], [ "headers = ['Word', 'Level', 'A/B', 'Lesson']\nwith pd.ExcelWriter(write_file) as writer:\n # 1.列出每一個詞的等級\n levelDf.to_excel(writer, 'All', index=False, header=headers)\n\n # 2.統計每一個等級共有多少字\n levels = list(range(1, 10))\n levelCountDf = pd.DataFrame(levels)\n ## A.主要詞彙的統計\n major = levelDf.loc[levelDf[2] == 'A']\n levelCountDf['A'] = statsLevel(major)\n ## B.補充詞彙的統計\n minor = levelDf.loc[levelDf[2] == 'B']\n levelCountDf['B'] = statsLevel(minor)\n ## C.主要詞彙+補充詞彙的統計\n levelCountDf['A/B'] = statsLevel(levelDf)\n \n levelCountDf.to_excel(writer, 'Stats', index=False, header=['Level', 'A', 'B', 'A/B'])\n\n # 3.統計每一個等級共有多少字 by lesson\n lessonDf = statsLessonLevel(levelDf)\n lessonDf.T.to_excel(writer, 'lessons', header=False)\n\n # 4.列出不在8000詞的生詞有哪些\n wordsNotIn = levelDf.loc[levelDf[1] == 9]\n wordsNotInDf = pd.DataFrame(wordsNotIn)\n \n wordsNotInDf.to_excel(writer, 'WordsNotIn', index=False, header=headers)\n\n writer.save()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa43b30509f0abb82d523260b3e745370074d1a
43,028
ipynb
Jupyter Notebook
Notebooks/Great Pyramid.ipynb
hayasam/MachineLearning
282f80330e0732b4aac2bccb251cb1c78c7f045c
[ "MIT" ]
1
2020-12-22T07:42:15.000Z
2020-12-22T07:42:15.000Z
Notebooks/Great Pyramid.ipynb
hayasam/MachineLearning
282f80330e0732b4aac2bccb251cb1c78c7f045c
[ "MIT" ]
null
null
null
Notebooks/Great Pyramid.ipynb
hayasam/MachineLearning
282f80330e0732b4aac2bccb251cb1c78c7f045c
[ "MIT" ]
null
null
null
68.8448
20,180
0.773287
[ [ [ "# The Great Pyramid\n\nThis is an estimate of the number of people needed to raise stones to the top of the [great pyramid](https://en.wikipedia.org/wiki/Great_Pyramid_of_Giza) using basic physics, such as force, energy, and power. It relies solely on reasonable estimates of known dimensions of the great pyramid and typical human labor capacity. The analysis will show that it is possible for crews of workers to raise 2.5 ton limestones to almost any level using ropes alone. Each crew would stand on an unfinished level and pull wooden sleds carrying stones up the 51.86 degree incline of the pyramid. This solution does not require ramps, pulleys, levers or any other mechanical advantage. It only requires coordination, rope, and well fed crews. If a crew tires after raising a set of stones, they could be quickly replaced by another well rested crew. The analysis will estimate the minimum crew size, number of crews required, the rate at which stones can be raised, and the maneuvering area available at each level.\n\nThe dimensions of the great pyramid are shown below:\n\n![Pyramid](data/Khufu.png)\n\n| Parameter | Value |\n| ----- | ----:|\n| Total number of stones| 2.5 million |\n| Average mass of each stone | 2.5 tons |\n| Total build time | 20 years |\n| Power available per worker | 200 Calories/day |\n| Active build time | 3 months/year |\n| Pyramid slope | 51.86 degrees |\n| Pyramid height | 146.5 meters |\n| Pyramid base | 230 m |\n| Coefficient of friction | 0.3 |\n| Number of layers | 210 |\n\n| Course | Height | Amount of Material |\n| ------ |:------:| ------------------:|\n| 1 | 15m | 30% |\n| 2 | 30m | 22% |\n| 3 | 50m | 30% |\n| 4 | 100m | 15% |\n| 5 | 146m | 3% |", "_____no_output_____" ] ], [ [ "from math import *\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\n# All values are in SI (MKS) units\n\nlbm_per_kg = 2.20462\nnewtons_per_lbf = 4.44822\njoules_per_kcal = 4184\nsec_per_day = 24 * 3600\nwatts_per_hp = 746\n\n# Total number of 
stones\nN_s = 2.5e6\n\n# Mass of one stone in kg\nm_s = 2.5 * 2000 / lbm_per_kg\n\n# Total build time in seconds\nT_b = 20 * 365.25 * sec_per_day\n\n# Average available power per crew member in kilocalories (nutrition calorie)\nP_w_kcal = 200\n\n# Average available power on crew member in Watts\nP_w = P_w_kcal * joules_per_kcal / sec_per_day\n\n# Pyramid slope in radians\ntheta = 51.86*pi/180\n\n# Pyramid base length in meters\nl_b = 230\n\n# Coefficient of friction between limestone and wood sleds\nmu = 0.3\n\n# Acceleration of gravity in m/2^s\ng = 9.81\n\n# Number of layers\nN_l = 210\n\n# Height of pyramid in meters\nh_max = 146.5", "_____no_output_____" ] ], [ [ "# Pulling Force\n\nIt is possible for a crew of men to stand on top of one flat level and simply pull a single stone up the side of a pyramid covered with smooth casing stones. It is expected that smooth casing stones were added at the same time each layer of rough blocks were added, which is very likely. This simple approach does not require large ramps, elaborate machines, deep knowledge, or alien intervention. It just requires many crews of workers pulling on ropes attached to rough stones. 
Of course, a number of additional crews are needed to place stones and align stones properly, but the solutions to those problems are well documented.\n\nThis analysis focuses solely on the rigging problem of raising stones to the proper level just prior to final placement.\n\nThe [force required](https://en.wikipedia.org/wiki/Inclined_plane) to pull one stone up the side of the pyramid is\n\n$$ F_p = m_s g (sin \\theta + \\mu cos \\theta)$$\n\nWhere $m_s$ is the mass of one stone, $g$ is acceleration of gravity, $\\theta$ is the pyramid slope, and $\\mu$ is the coefficient of friction.\n\nGiven the parameters above, the pulling force is", "_____no_output_____" ] ], [ [ "F_p = m_s * g * (sin(theta) + mu*cos(theta))\nprint('%.4f N' % F_p)\nprint('%.4f lbf' % (F_p / newtons_per_lbf) )", "21620.8639 N\n4860.5653 lbf\n" ] ], [ [ "$$ F_p \\approx 21620 N $$\n\nor\n\n$$ F_p \\approx 4860 lbf $$\n\nThis is slightly less than the 5000 lb weight of each stone, which is due to the slope of incline and static friction. Dynamic friction is perhaps lower, so the actual pulling force while in motion may be less.", "_____no_output_____" ], [ "# Energy to Raise Stones\n\nEnergy is force times distance moved. 
The distance along the slope up to a height $h$ is\n\n$$ d = \\frac{h}{sin \\theta} $$\n\nGiven the force derived earlier, energy required to raise a single stone to a height $h$ is\n\n$$ E_s = \\frac{F_p h}{sin \\theta} $$\n\nFor all stones the total energy is\n\n$$ E_t = \\frac{F_p}{sin \\theta} \\sum_{i=1}^{m} h N_{blocks} $$\n\nAn approximate estimate for comparison is:\n\n$$ E_t = \\frac{F_p N_s}{sin \\theta} (15m \\times 0.3 + 30m \\times 0.22 + 50m \\times 0.3 + 100m \\times 0.15 + 146m \\times 0.03) $$\n\nThe total energy is estimate in two steps:\n * Compute the total volume to get average block volume\n * Compute energy per layer given average block volume\n\nThe iterative computation will be compared with the approximate estimate.\n\nThe total energy is", "_____no_output_____" ] ], [ [ "dh = h_max / N_l\ntotal_volume = 0\nh = 0\ntan_theta2 = tan(theta)**2\nfor i in range(N_l):\n th = (h_max - h)**2\n A_f = 4 * th / tan_theta2\n total_volume += dh * A_f\n h += dh\nprint('Total volume: %.3e m^3' % total_volume)\nblock_volume = total_volume/N_s\nprint('Block volume: %.3e m^3' % block_volume)\n\nE_t = 0\nh = 0\nfor i in range(N_l):\n th = (h_max - h)**2\n A_f = 4 * th / tan_theta2\n num_blocks = dh * A_f / block_volume\n E_t += F_p * num_blocks * h / sin(theta)\n h += dh\n \nprint('Total energy: %.2e Joules' % E_t)\nprint('Total energy: %.2e kcal' % (E_t/joules_per_kcal))\n\nE_t_approx = F_p * N_s * (15*0.3 + 30*0.22 + 50*0.3 + 100*0.15 + 146*0.03) / sin(theta)\nprint('Approximate: %.2e Joules' % E_t_approx)\nprint('Approximate: %.2e kcal' % (E_t_approx/joules_per_kcal))", "Total volume: 2.603e+06 m^3\nBlock volume: 1.041e+00 m^3\nTotal energy: 2.50e+12 Joules\nTotal energy: 5.97e+08 kcal\nApproximate: 3.13e+12 Joules\nApproximate: 7.47e+08 kcal\n" ] ], [ [ "The iterative estimate is somewhat less than the approximate energy, which is reasonable.\n\n$$ E_t \\approx 2.5 \\times 10^{12} J $$\n\nor\n\n$$ E_t \\approx 5.97 \\times 10^8 {kcal} $$", 
"_____no_output_____" ], [ "# Average Power\n\nThe average power required to raise all stones is\n\n$$ P_{avg} = \\frac{E_t}{T_b} $$", "_____no_output_____" ] ], [ [ "P_avg = E_t/T_b\nprint('%.2f W' % (P_avg))\nprint('%.2f HP' % (P_avg/watts_per_hp))", "3959.59 W\n5.31 HP\n" ] ], [ [ "In watts, the value is:\n\n$$ P_{avg} \\approx 3960 W $$\n\nIn horse power:\n\n$$ P_{avg} \\approx 5.31 {HP} $$\n\nThis surprisingly modest number is due to the 20 year build time for the pyramid. Even though the size of the pyramid is staggering, the build time is equally large. By inspection, we can imagine the number of workers needed to deliver this power, which is not as large as might be expected.\n\n5.3 horse power would be easily available using a few draught animals, but that would require coaxing animals to climb to high levels and repeatedly pulling over a significant distance. This presents several logistical challenges, which might explain why there is little evidence of animal power used to raise stones. Humans can stand in one place and pull ropes hand over hand with upper body power or two crews could alternate pulling one set of ropes using lower body power. Perhaps different techniques were used depending on available maneuvering area.", "_____no_output_____" ], [ "# Workforce Size\n\nHuman are not efficient machines, perhaps 20% thermal efficiency. Given a modest diet where 1000 calories are burned, one worker might deliver 200 calories/day of mechanical work. This is an average power of 9.7 Watts. 
Assuming work is performed during only one season (one quarter of a year), the total number of workers required to raise all blocks is given by\n\n$$ N_w = 4 \\frac{P_{avg}}{P_w} $$\n\nThe approximate number of workers is", "_____no_output_____" ] ], [ [ "N_w = 4 * P_avg / P_w\nprint('%d workers' % N_w)", "1635 workers\n" ] ], [ [ "$$ N_w \\approx 1635 $$\n\nOther estimates of total workforce are about 10 times this value, which makes sense given resting time, and many other tasks, such as cutting and transporting stones, finish work, food preparation, management, accounting, and other support activities.\n\nTo lift a single stone, a crew of workers would be required to raise each stone. Assuming each worker can pull 75 lbs, the size of a single lifting crew is\n\n$$ N_{lc} = \\frac{F_p}{75 lbf} $$\n\nThe number of workers in a lifting crew is", "_____no_output_____" ] ], [ [ "F_1p = 75 * newtons_per_lbf\nN_lc = F_p / F_1p\nprint('%.1f workers per lifting crew' % N_lc)", "64.8 workers per lifting crew\n" ] ], [ [ "$$ N_{lc} \\approx 65 $$\n\nThat's 65 workers per lifting crew. The total number of crews is\n\n$$ N_c = \\frac{N_w}{N_{lc}} $$", "_____no_output_____" ] ], [ [ "N_c = N_w / N_lc\nprint('%.1f crews' % N_c)", "25.2 crews\n" ] ], [ [ "Roughly 25 concurrent crews of 65 people are required just to raise all stones over 20 years.", "_____no_output_____" ], [ "# Stone Raising Rate\n\nAssuming all 25 crews are operating concurrently, it is possible to estimate the block raising rate. 200 calories per day of worker output is an average number. Humans are not machines and need rest, so in practice, crews may only raise blocks as little as 4 hours per day. 
Assuming all 200 calories per worker is delivered in a four hour shift, the available peak crew power would be six times the average daily power:\n\n$$ P_{cp} = 6 N_{lc} P_w$$", "_____no_output_____" ] ], [ [ "P_cp = 6 * N_lc * P_w\nprint('%.2f W' % (P_cp))\nprint('%.2f HP' % (P_cp/watts_per_hp))", "3766.04 W\n5.05 HP\n" ] ], [ [ "This value is about 3.8 kW or just a little over 5 horsepower for a crew of 65 workers. This suggests about 13 humans can do the same amount of work as one horse for four hours a day, which seems reasonable.\n\nThe average velocity of a single block raised by a crew is given by\n\n$$ v_{bc} = \\frac{P_{cp}}{F_p} $$", "_____no_output_____" ] ], [ [ "feet_per_meter = 3.28084\nv_bc = P_cp / F_p\nprint('%.3f m/s' % (v_bc))\nprint('%.3f ft/s' % (v_bc * feet_per_meter))", "0.174 m/s\n0.571 ft/s\n" ] ], [ [ "The rate along the slope is about 0.17 $m/s$ or 0.57 $ft/s$.\n\nTo raise one stone to a height h, the time required is\n\n$$ t = \\frac{h}{v_{bc} sin \\theta} $$", "_____no_output_____" ] ], [ [ "h = 30\nt = h/(v_bc * sin(theta))\nprint('%.1f seconds' % (t))\nprint('%.1f minutes' % (t/60))", "219.0 seconds\n3.6 minutes\n" ] ], [ [ "To raise one block to a height of 30m, which includes more than 50% of all stones, the time is about 219 seconds or 3.6 minutes. With all 25 crews operating concurrently, one stone could be raised every nine seconds or less.", "_____no_output_____" ], [ "# Logistics\n\nFitting 1635 workers on a level at one time requires room to maneuver. The area available is reduced higher up the pyramid. 
Assuming all 25 crews are operating concurrently and each worker requires at least $1 m^2$, the minimum area required is $A_c \\approx 1635 m^2$.\n\nThe available area at a height $h$ is\n\n$$ A_l = \\left(\\frac{2 (h_{max} - h)}{tan \\theta}\\right)^2 $$\n\nWhere $l_b$ is the length of the base of the pyramid.\n\nThe fraction of available maneuvering area is\n\n$$ r_m = \\frac{A_l-A_c}{A_l} $$\n\nA plot of available maneuvering area and completed volume is shown below.", "_____no_output_____" ] ], [ [ "A_c = N_w\ndh = h_max / N_l\nh = 0\ntan_theta2 = tan(theta)**2\nheights = []\nareas = []\nvolumes = []\nvolume = 0\nfor i in range(N_l):\n th = (h_max - h)**2\n A_l = 4 * th / tan_theta2\n volume += dh * A_l\n r_a = (A_l-A_c)/A_l\n heights.append(h)\n areas.append(100*r_a)\n volumes.append(100*(volume/total_volume))\n h += dh\n\nlimit = -40\nplt.plot(heights[0:limit], areas[0:limit], label='Maneuvering area', color='blue')\nplt.plot(heights[0:limit], volumes[0:limit], label='Completed volume', color='red')\nplt.ylabel('Percentage (%)')\nplt.xlabel('Height (m)')\nplt.legend(loc='best')\nplt.show()\n\nlimit = -66\nprint('At a height of %.1f m, %.1f %% of the pyramid is complete.' % (heights[limit], volumes[limit]))", "_____no_output_____" ] ], [ [ "Even at a height of 100m, where only 3% of the pyramid remains, more than two times the area required by all 25 lifting crews is still available. This should leave sufficient room for others to position stones after they have been lifted. At 117m, there is just enough room for all 25 crews, so stone placement will slow down. Fortunately, fewer stones are required at the highest levels.", "_____no_output_____" ], [ "# Ramps and Stone Size\n\nThis theory might explain why there is little evidence of external or internal ramps, simply because a smooth pyramid can act as the ramp itself. It might also explain how large granite blocks were hauled up to the kings chamber. 
Considering the required rate of block raising, a wide ramp is needed. Narrow ramps that can only support one or two blocks side by side seem like a bottleneck. Ramps with right angles require more time to rotate and orient blocks. Using the sides of the pyramid offers the largest ramp possible on all four sides, so the only limitation would be the number of workers that could be on top at any one time. Even if one set of crews becomes fatigued raising stones, they could be relieved by another crew later in the day. It is possible that two or more shifts of lifting crews were used to minimize fatigue or injury. If ropes were long enough, it is possible that workers could have walked down the opposite slope of the pyramid, using their own weight to counter the weight of stones they were attempting to lift.\n\nA similar energy analysis can be done using conventional shallow ramps to raise stones. Interestingly, a ramp with a 7% grade requires almost 5 times more energy to raise all 2.5 million stones than using the side of pyramid. Although a shallow ramp reduces the amount of force required to move stones, the distance travelled is much farther, so more energy is lost in friction. Additionally, a conventional ramp requires workers to climb the height of the pyramid along with the stone they are pulling, so they must lift their own weight in addition to the weight of the stone. This requires more energy, which is not used to lift stones. Clearly, it's a highly inefficient strategy.\n\nThe Egyptians were free to decide how big to make the rough limestone blocks. They could have made them small enough for one person to carry, but they chose not to. After many pyramid construction attempts, they decided that 2.5 ton blocks were small enough to handle without too much difficulty, so raising these stones had to be straightforward. 
It seems that simply dragging blocks up the side of a smooth pyramid is a straightforward solution that they could have easily developed on their own. It probably seemed so obvious to them that it made no sense to document it.", "_____no_output_____" ], [ "# Summary\n\nA crew of about 65 workers can raise 2.5 ton stones using simple ropes alone. Over a 20 year period, 25 concurrent crews totalling roughly 1625 workers are sufficient to raise all 2.5 million stones. There are a number of factors that could reduce the number of workers required. Friction could be reduced using available lubricants or particularly strong, well fed workers could have been selected for this critical role.\n\nBuilding the pyramids seems staggering to us today, but that may be due more to our short attention span and availability of powerful machines to do the heavy lifting. We don't stop to consider that a large, organized workforce, all pulling together at the same time, can do a lot of work. It's not magic, just dedication and arithmetic.\n\nIn the modern day, we expect a return on our investment in a reasonable time, perhaps five or ten years for large public works projects. For the pharoahs, 20 years was a completely acceptable delivery schedule for their investment and exit strategy. To achieve higher rates of return, we build powerful machines that could be operated by a single person. We just don't accept slow progress over a long period of time because our expectations and labor costs are so high. The pharoahs on the other hand, were in the opposite position. They had a large workforce that was willing dedicate themselves to a single cause over a significant part of their lifetime. This dedication is perhaps the real achievement we should admire.", "_____no_output_____" ], [ "Copyright (c) Madhu Siddalingaiah 2020", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ] ]
4aa445bf434892348514dac639e3691d8a67db02
159,950
ipynb
Jupyter Notebook
Data_Analysis/Fenu_Maldera/Esercizio_1.ipynb
andreasemeraro/MPM_Space_Sciences
b9171f8b926f6ab355c4d87b6f715944b29b05ec
[ "MIT" ]
null
null
null
Data_Analysis/Fenu_Maldera/Esercizio_1.ipynb
andreasemeraro/MPM_Space_Sciences
b9171f8b926f6ab355c4d87b6f715944b29b05ec
[ "MIT" ]
null
null
null
Data_Analysis/Fenu_Maldera/Esercizio_1.ipynb
andreasemeraro/MPM_Space_Sciences
b9171f8b926f6ab355c4d87b6f715944b29b05ec
[ "MIT" ]
null
null
null
306.417625
26,696
0.931666
[ [ [ "# Aeff toy MC\n", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom matplotlib import pyplot as plt", "_____no_output_____" ], [ "\nplt.rcParams['text.usetex'] = False", "_____no_output_____" ] ], [ [ "define the function for the selection efficiency, as a function of log10(E): \n", "_____no_output_____" ] ], [ [ "def Eeff(logE):\n x0=2.7\n y=1-1./(np.exp((logE-x0)/0.5)+1) \n \n return y", "_____no_output_____" ] ], [ [ "<img src=\"plotEff.png\" alt=\"efficiency\" width=\"300px\">\n\n\n\ngeneratig random distributions using numpy arrays:", "_____no_output_____" ] ], [ [ "Nev=10000000 # n. of events we simulate\nE_MIN=100\nE_MAX=10000000\n\n# generate energies with E^-1 spectrum, using the inversion method \nu1=np.random.rand(Nev)\nenergies=E_MIN*np.exp(u1*np.log(E_MAX/E_MIN))\n\n#generate phi angles (even if this is not used at the moment)\nu2=np.random.rand(Nev)\nphi = 2*np.pi*u2\n\n#generate theta angles \nu3=np.random.rand(Nev)\ntheta= np.arccos(u3)\n\n#generate x,y impact points with -1<x<1, -1<y<1 (2x2 m^2) \nx= -1+2*np.random.rand(Nev)\ny=-1+2*np.random.rand(Nev)\n \n ", "_____no_output_____" ], [ "#theta1=[40/180 *np.pi for i in range(0,int(0.6*Nev))]", "_____no_output_____" ], [ "#theta2=[60/180 *np.pi for i in range(0,int(0.4*Nev))]", "_____no_output_____" ], [ "#theta=np.concatenate((theta1,theta2))", "_____no_output_____" ] ], [ [ "apply cuts using array masks:", "_____no_output_____" ] ], [ [ "#energy mask using the hit/miss method. 
\nu5=np.random.rand(Nev)\npe=Eeff(np.log10(energies))\ne_mask=u5<pe # keep the cases where u < y \n\n#combine all cuts:\ntotalMask=(e_mask& (x>-np.sqrt(2)/2)& (x<np.sqrt(2)/2) & (y>-np.sqrt(2)/2)& (y<np.sqrt(2)/2))# detector dimensions -0.5-0.5 on both axis", "_____no_output_____" ] ], [ [ "plot the distributions:\nEnergy: (note that the resulting histo include all cuts, not only on energy) ", "_____no_output_____" ] ], [ [ "plt.figure(1,figsize=(7,5))\nplt.hist(np.log10(energies[totalMask]), bins=50, alpha=1, histtype='step', label='final E')\nplt.hist(np.log10(energies), bins=50, alpha=1, histtype='step',label='simulated E')\nplt.legend()\nplt.xlabel('log10(E)')\nplt.ylabel('counts')\nplt.savefig(\"counts(E).png\")\nplt.show()\n", "_____no_output_____" ] ], [ [ "theta and phi:", "_____no_output_____" ] ], [ [ "plt.figure(2,figsize=(7,5))\nplt.hist(theta[totalMask], bins=50, alpha=1, histtype='step', label='theta final')\nplt.hist(theta, bins=50, alpha=1, histtype='step', label='theta simulated')\n#plt.figure(3)\nplt.hist(phi[totalMask], bins=50, alpha=1, histtype='step', label='phi final')\nplt.hist(phi, bins=50, alpha=1, histtype='step', label='phi simulated')\nplt.xlabel('[rad]')\nplt.ylabel('counts')\nplt.legend()\nplt.savefig(\"counts(angle).png\")\nplt.show()\n", "_____no_output_____" ] ], [ [ "x,y", "_____no_output_____" ] ], [ [ "plt.figure(3,figsize=(7,5))\nplt.hist(x[totalMask], bins=50, alpha=1, histtype='step', label='x final')\nplt.hist(x, bins=50, alpha=1, histtype='step', label='x simulated')\n#plt.figure(3)\nplt.hist(y[totalMask], bins=50, alpha=1, histtype='step', label='y final')\nplt.hist(y, bins=50, alpha=1, histtype='step', label='y simulated')\nplt.xlabel('[m]')\nplt.ylabel('counts')\nplt.legend()\nplt.savefig(\"counts(area).png\")\nplt.show()\n", "_____no_output_____" ] ], [ [ "create rensponse matrix:", "_____no_output_____" ] ], [ [ 
"nBins2d=40\nH,xbins,ybins=np.histogram2d(np.log10(energies[totalMask]),np.cos(theta[totalMask]),nBins2d)\n\n# histogram normaliztion\nnormFactor=Nev/(2*nBins2d**2) # the factor 4 comes form the area reatio\nH=H/(normFactor)\n# plot\n#y=[i/np.pi*180 for i in ybins]\nfig, ax1 = plt.subplots(figsize=(8, 5))\np=ax1.pcolormesh(xbins,ybins,H.T, cmap='rainbow')\nbar=fig.colorbar(p, ax=ax1, extend='both')\nbar.ax.set_ylabel('Aeff [m^2]', rotation=270,labelpad=13,fontsize=13)\nax1.set_xlabel('log10(E)',fontsize=14)\nax1.set_ylabel('theta[°]',fontsize=14)\nplt.savefig(\"Aeff_costheta.png\")\nplt.show()", "_____no_output_____" ], [ "y", "_____no_output_____" ], [ "ybins", "_____no_output_____" ], [ "nBins2d=40\nH,xbins,ybins=np.histogram2d(np.log10(energies[totalMask]),theta[totalMask],nBins2d)\n\n# histogram normaliztion\nnormFactor=Nev/(2*nBins2d**2) # the factor 4 comes form the area reatio\nH=H/(normFactor)\n# plot\ny=[i/np.pi*180 for i in ybins]\nfig, ax1 = plt.subplots(figsize=(8, 5))\np=ax1.pcolormesh(xbins,y,H.T, cmap='rainbow')\nbar=fig.colorbar(p, ax=ax1, extend='both')\nbar.ax.set_ylabel('Aeff [m^2]', rotation=270,labelpad=13,fontsize=13)\nax1.set_xlabel('log10(E)',fontsize=14)\nax1.set_ylabel('theta[°]',fontsize=14)\n#ax1.set_xscale(\"log\")\nplt.savefig(\"Aeff_theta.png\")\nplt.show()\n", "_____no_output_____" ], [ "# Finallly slect one bin in cos(theta) and plot Aeff vs log10(E) for that bin\n\nplt.figure(5)\nhalfxbin=(xbins[1]-xbins[0])/2.\nplt.plot(xbins[:-1]+halfxbin, H.T[18],label=\"40°\" )\nplt.plot(xbins[:-1]+halfxbin, H.T[27],label=\"60°\" )\nplt.legend()\nplt.xlabel('log10(E)')\nplt.ylabel('Aeff [m]')\nplt.savefig(\"40_60\")\nplt.show()\n", "_____no_output_____" ], [ "mean=[0.6*H.T[18][i]+0.4*H.T[27][i] for i in range(0,len(H.T[18]))]", "_____no_output_____" ], [ "# Finallly slect one bin in cos(theta) and plot Aeff vs log10(E) for that bin\n\nplt.figure(5)\nhalfxbin=(xbins[1]-xbins[0])/2.\nplt.plot(xbins[:-1]+halfxbin, mean )\nplt.title(\"Mean\" 
)\nplt.xlabel('log10(E)')\nplt.ylabel('Aeff [m]')\nplt.savefig(\"Mean\")\nplt.show()\n", "_____no_output_____" ], [ "# Finallly slect one bin in cos(theta) and plot Aeff vs log10(E) for that bin\n\nplt.figure(5)\nhalfxbin=(xbins[1]-xbins[0])/2.\nplt.plot(xbins[:-1]+halfxbin, H.T[18],label=\"40°\" )\nplt.plot(xbins[:-1]+halfxbin, H.T[27],label=\"60°\" )\nplt.plot(xbins[:-1]+halfxbin,mean,label=\"mean\" )\nplt.legend()\nplt.xlabel('log10(E)')\nplt.ylabel('Aeff [m]')\nplt.savefig(\"Aeff_all\")\nplt.show()\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa448fb6006b0276d9c394abde138291e86ce7e
6,433
ipynb
Jupyter Notebook
notebooks/CT_US_COVID_TESTS.ipynb
kowsiktm/COVID-19-data
272c6632475d3978cc55e651af19c00c7d94c382
[ "BSD-3-Clause" ]
null
null
null
notebooks/CT_US_COVID_TESTS.ipynb
kowsiktm/COVID-19-data
272c6632475d3978cc55e651af19c00c7d94c382
[ "BSD-3-Clause" ]
null
null
null
notebooks/CT_US_COVID_TESTS.ipynb
kowsiktm/COVID-19-data
272c6632475d3978cc55e651af19c00c7d94c382
[ "BSD-3-Clause" ]
null
null
null
28.977477
406
0.558215
[ [ [ "# Importing from the COVID Tracking Project\n\nThis script pulls data from the API provided by the [COVID Tracking Project](https://covidtracking.com/). They're collecting data from 50 US states, the District of Columbia, and five U.S. territories to provide the most comprehensive testing data. They attempt to include positive and negative results, pending tests and total people tested for each state or district currently reporting that data..", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport requests\nimport json\nimport datetime\nimport pycountry", "_____no_output_____" ], [ "# papermill parameters\noutput_folder = '../output/'", "_____no_output_____" ], [ "raw_response = requests.get(\"https://covidtracking.com/api/states/daily\").text\nraw_data = pd.DataFrame.from_dict(json.loads(raw_response))", "_____no_output_____" ] ], [ [ "### Data Quality\n1. Replace empty values with zero\n2. Convert \"date\" int column to \"Date\" datetime column\n4. Rename columns in order to match with other source\n5. Drop unnecessary columns\n6. 
Add \"Country/Region\" column, since the source contains data from US states, it can be hardcoded", "_____no_output_____" ] ], [ [ "data = raw_data.fillna(0)\ndata['Date'] = pd.to_datetime(data['date'].astype(str), format='%Y%m%d')\ndata = data.rename(\n columns={\n \"state\": \"ISO3166-2\",\n \"positive\": \"Positive\",\n \"negative\": \"Negative\",\n \"pending\": \"Pending\",\n \"death\": \"Death\",\n \"totalTestResults\": \"Total\",\n \"hospitalized\": \"Hospitalized\"\n })\ndata = data.drop(labels=['dateChecked', \"date\"], axis='columns')\ndata['Country/Region'] = \"United States\"\ndata['ISO3166-1'] = \"US\"", "_____no_output_____" ], [ "states = {k.code.replace(\"US-\", \"\"): k.name for k in pycountry.subdivisions.get(country_code=\"US\")}", "_____no_output_____" ], [ "data[\"Province/State\"] = data[\"ISO3166-2\"].apply(lambda x: states[x])", "_____no_output_____" ] ], [ [ "## Sorting data by Province/State before calculating the daily differences", "_____no_output_____" ] ], [ [ "sorted_data = data.sort_values(by=['Province/State'] + ['Date'], ascending=True)", "_____no_output_____" ], [ "sorted_data['Positive_Since_Previous_Day'] = sorted_data['Positive'] - sorted_data.groupby(['Province/State'])[\"Positive\"].shift(1, fill_value=0)\nsorted_data['Total_Since_Previous_Day'] = sorted_data['Total'] - sorted_data.groupby(['Province/State'])[\"Total\"].shift(1, fill_value=0)\nsorted_data['Negative_Since_Previous_Day'] = sorted_data['Negative'] - sorted_data.groupby(['Province/State'])[\"Negative\"].shift(1, fill_value=0)\nsorted_data['Pending_Since_Previous_Day'] = sorted_data['Pending'] - sorted_data.groupby(['Province/State'])[\"Pending\"].shift(1, fill_value=0)\nsorted_data['Death_Since_Previous_Day'] = sorted_data['Death'] - sorted_data.groupby(['Province/State'])[\"Death\"].shift(1, fill_value=0)\nsorted_data['Hospitalized_Since_Previous_Day'] = sorted_data['Hospitalized'] - sorted_data.groupby(['Province/State'])[\"Hospitalized\"].shift(1, 
fill_value=0)", "_____no_output_____" ] ], [ [ "## Rearrange columns", "_____no_output_____" ] ], [ [ "rearranged_data = sorted_data.filter(items=['Country/Region', 'Province/State', 'Date',\n 'Positive', 'Positive_Since_Previous_Day',\n 'Negative', 'Negative_Since_Previous_Day',\n 'Pending', 'Pending_Since_Previous_Day',\n 'Death', 'Death_Since_Previous_Day',\n 'Hospitalized', 'Hospitalized_Since_Previous_Day',\n 'Total', 'Total_Since_Previous_Day',\n 'ISO3166-1', 'ISO3166-2'])", "_____no_output_____" ] ], [ [ "## Add `Last_Update_Date`", "_____no_output_____" ] ], [ [ "rearranged_data.loc[:, \"Last_Update_Date\"] = datetime.datetime.utcnow()", "_____no_output_____" ] ], [ [ "## Export to CSV", "_____no_output_____" ] ], [ [ "rearranged_data.to_csv(output_folder + \"CT_US_COVID_TESTS.csv\", index=False)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa44d562d0fe49a09ff34f79b7c85bd8d59bf09
848,872
ipynb
Jupyter Notebook
day_5/seminar_10_time_series/seminar_10_time_series.ipynb
likzet/craft_of_data_visualization
95c8a0739f17a069477dcd5a9022b96535f2d61a
[ "MIT" ]
28
2018-12-28T10:19:55.000Z
2021-03-31T12:07:16.000Z
day_5/seminar_10_time_series/seminar_10_time_series.ipynb
likzet/craft_of_data_visualization
95c8a0739f17a069477dcd5a9022b96535f2d61a
[ "MIT" ]
null
null
null
day_5/seminar_10_time_series/seminar_10_time_series.ipynb
likzet/craft_of_data_visualization
95c8a0739f17a069477dcd5a9022b96535f2d61a
[ "MIT" ]
4
2019-01-17T20:31:44.000Z
2020-04-27T15:32:36.000Z
595.699649
132,752
0.946213
[ [ [ "# Time series analysis and visualization", "_____no_output_____" ] ], [ [ "# Hide all warnings\nimport warnings\nwarnings.simplefilter('ignore')\n\nimport numpy as np\nimport pandas as pd\n\n%matplotlib inline\nimport matplotlib.pyplot as plt\n\nimport statsmodels as sm\nimport statsmodels.api\n\nfrom tqdm import tqdm", "_____no_output_____" ], [ "from pylab import rcParams # Run-Control (default) parameters\n \nrcParams['figure.figsize'] = 16, 8\nrcParams['lines.linewidth'] = 4\nrcParams['font.size'] = 26", "_____no_output_____" ] ], [ [ "<br>", "_____no_output_____" ], [ "## Time series analysis is for\n\n* compact **dynamics description** of observable processes\n* interpretation of dynamics and **estimation of impulse response**\n* **forecasting** and simulation\n* solution **optimal control** problems", "_____no_output_____" ], [ "<br>", "_____no_output_____" ], [ "## The objective of time series analysis\n\nConstruct a model of time series for _current value_ of **endogeneous** variable $y_t$\n\n* by the _history_ of itself $$y_{:t} = (y_{t-1}, y_{t-2}, \\ldots)$$\n* by _current value_ of **exogeneous** variables $x_t$ and possibly by its _history_ too\n$$\n y_t \\approx \\text{model}\\bigl( t,\\, y_{:t},\\, x_t,\\, x_{:t} \\bigr)\n\\,. $$\n\nUsually one forecasts a single time step ahead.", "_____no_output_____" ], [ "<br>", "_____no_output_____" ], [ "## Difference from other Machine Learning tasks\n\n* Data are sequential\n * order of **time** has to be respected strictly due to not break the causality ", "_____no_output_____" ], [ "* Much attention to **extrapolation** — a forecast of future values related to observed sample\n * It is important to be sure that data do not leak from future to current and to past observations of train subsample during feature engineering and training the model", "_____no_output_____" ], [ "Thus features of the model can depend only on\n* **endogeneous** variables $y_{t-1}, y_{t-2}, \\ldots$, i.e. 
they are available to the moment $t-1$ _inclusively_\n* **exogeneous** variables $x_t, x_{t-1}, \\ldots$, i.e. they are available to the moment $t$ _inclusively_", "_____no_output_____" ], [ "<br>", "_____no_output_____" ], [ "## $CO_2$ concentration in atmosphere [dataset](https://www.co2.earth/weekly-co2)", "_____no_output_____" ] ], [ [ "dataset = pd.read_csv('./mauna_loa_atmospheric_c02.csv',\n index_col=None, usecols=['date', 'WMLCO2'])", "_____no_output_____" ], [ "dataset.head()", "_____no_output_____" ] ], [ [ "When you loads a time series within `Pandas` you have to set format of date and time explicitly", "_____no_output_____" ] ], [ [ "dataset['date'] = pd.to_datetime(dataset['date'], format='%Y-%m-%d')", "_____no_output_____" ] ], [ [ "Create the index for loaded data: it will be **weekly periodical index**. We will get data with regular frequency.", "_____no_output_____" ] ], [ [ "dataset = dataset.set_index('date').to_period('W')", "_____no_output_____" ], [ "dataset.head()", "_____no_output_____" ] ], [ [ "Plot dynamics of the time series", "_____no_output_____" ] ], [ [ "dataset.plot()\n\nplt.grid(which='major', axis='both')", "_____no_output_____" ] ], [ [ "Aggregate weekly data to monthly", "_____no_output_____" ] ], [ [ "dataset = dataset.to_timestamp()\n\ndataset = dataset.resample('M').mean()", "_____no_output_____" ], [ "dataset.head()", "_____no_output_____" ], [ "dataset.plot()\n\nplt.grid(which='major', axis='both')", "_____no_output_____" ] ], [ [ "Create summary statistics", "_____no_output_____" ] ], [ [ "print('Series {1}, Observations {0}'.format(*dataset.shape))\n\ndataset.describe().T.head()", "Series 1, Observations 526\n" ], [ "dataset.loc['1960':'1967'].plot()\n\nplt.grid(which='major', axis='both')", "_____no_output_____" ] ], [ [ "### Missed values", "_____no_output_____" ] ], [ [ "maginfy_slice = slice('1960', '1967')", "_____no_output_____" ] ], [ [ "Missed values can be filled by\n\n1) last known observable\n * **+** doesn't 
look through the future\n * **-** can't fill the beginning of the series\n * **-** doesn't account specificity of the series", "_____no_output_____" ] ], [ [ "dataset_ff = dataset.fillna(method='ffill')", "_____no_output_____" ], [ "dataset_ff.loc[maginfy_slice].plot()\n\nplt.grid(which='major', axis='both')", "_____no_output_____" ] ], [ [ "2) iterpolation of the neighboring values\n\n* **+** smooth peaks\n* **-** doesn't fill the ends of the series\n* **-** slightly look through the future", "_____no_output_____" ] ], [ [ "dataset_linterp = dataset.interpolate(method='linear')\n\ndataset_pinterp = dataset.interpolate(method='polynomial', order=2)", "_____no_output_____" ], [ "ax = dataset_pinterp.loc[maginfy_slice].plot()\n\ndataset_linterp.loc[maginfy_slice].plot(ax=ax, linewidth=4, alpha=0.7)\n\nplt.grid(which='major', axis='both')", "_____no_output_____" ] ], [ [ "3) exlude at all\n\n* **+** doesn't change the values\n* **-** break the regularity and related periodicity\n* **-** deplete the sampling", "_____no_output_____" ] ], [ [ "dataset_drop = dataset.dropna()", "_____no_output_____" ], [ "dataset_drop.loc[maginfy_slice].plot()\n\nplt.grid(which='major', axis='both')", "_____no_output_____" ] ], [ [ "4) estimate by probabilty model\n\n* **+** filling based on extracted patterns (learned dependencies)\n* **-** it is needed to specify the model and to train it", "_____no_output_____" ], [ "5) smooth by splines or by local kernel model\n* **+** explicitly accounts close in time observations\n* **+** allows to increase the frequency of observations (\"_resolution_\")\n* **+** allows to fill missed boundary values\n* **-** look through the future far\n* **-** it is needed to define th kernel and the model for extrapolation", "_____no_output_____" ], [ "Looking into the future can be ignorred if **missed values are minority**.\n\nBut if missed values are majority then it is needed to understand why it is happened in the sampling.", "_____no_output_____" ] ], [ [ 
"full_dataset = dataset_pinterp", "_____no_output_____" ] ], [ [ "Prepare train and test samplings in the ratio 3 to 1", "_____no_output_____" ] ], [ [ "holdout = full_dataset.loc['1991-01-01':]\n\ndataset = full_dataset.loc[:'1990-12-31']\n\nprint(len(dataset), len(holdout))", "394 132\n" ] ], [ [ "Make sure the parts don't intersect", "_____no_output_____" ] ], [ [ "pd.concat([\n dataset.tail(),\n holdout.head()\n], axis=1)", "_____no_output_____" ] ], [ [ "Store the bounds of the intervals explicitly", "_____no_output_____" ] ], [ [ "holdout_slice = slice(*holdout.index[[0, -1]])\n\nprint('Train sample from {} to {}'.format(*dataset.index[[0, -1]]))\n\nprint('Test sample from {} to {}'.format(holdout_slice.start, holdout_slice.stop))", "Train sample from 1958-03-31 00:00:00 to 1990-12-31 00:00:00\nTest sample from 1991-01-31 00:00:00 to 2001-12-31 00:00:00\n" ] ], [ [ "Select the column of target variable", "_____no_output_____" ] ], [ [ "target_column = 'WMLCO2'", "_____no_output_____" ], [ "fig = plt.figure()\nax = fig.add_subplot(111, xlabel='Date', ylabel='value', title=target_column) # 111 means 1 row 1 column 1st axes on the \"grid\"\n\n# plot dynamics of entire time series\nfull_dataset[target_column].plot(ax=ax)\n\n# highlight delayed interval for testing\nax.axvspan(holdout_slice.start, holdout_slice.stop,\n color='C1', alpha=0.25, zorder=-99)\n\nax.grid(which='major', axis='both');", "_____no_output_____" ] ], [ [ "<br>", "_____no_output_____" ], [ "# A property\n\n**Stationarity** is a property of a process $\\{y_t\\}_{t\\geq0}$ meaning\n> probabilistic interconnections in the set $(y_{t_1},\\,\\ldots,\\,y_{t_m})$ are invariant with respect to shift $s \\neq 0$. 
", "_____no_output_____" ], [ "That means\n* **there are no special moments** in the time when statistical properties of observables are changing\n* patterns are stable in time and are determined by **indentation of observables** relative to each other:\n * mean, dispersion, and autocorrelation doesn't depend on moment of time", "_____no_output_____" ], [ "## A ghost property\n\nStochastic processes in real problems are **almost always non-stationary**\n* mean depends on time (there is a trend in the dynamics)\n* calendar events (holidays or vacations)\n* season periodicity\n * daily rhythm of power grid load\n * season temperature\n * yearly peak of monthly inflation in the beginning of year\n* unpredictable structural drift\n * political decisions\n * blackouts\n * hysteresis", "_____no_output_____" ], [ "Thus majority of time series especially economic, climatic, and financial are non-stationary.", "_____no_output_____" ], [ "<br>", "_____no_output_____" ], [ "# Visualization and diagnosis of non-stationarity", "_____no_output_____" ], [ "Visualization in time series analysis allows to\n* get preliminary picture of correlations\n* select reasonable strategy of validation a model\n* estimate if there is structural drift\n * leaps and gaps\n * clusters of intensive oscillations or periods of plateau\n* diagnose non-stationarity: trend, seasonality, etc.", "_____no_output_____" ], [ "### A plot of moving statistics", "_____no_output_____" ], [ "Moving statistics of a series within window of length $N$ allow to discover changes in time\n\n* **moving average** of time series level\n$$\n m_t = \\frac1{N} \\sum_{s=t-N+1}^t y_s\n$$\n* **moving standard deviation** (scatter)\n$$\n s_t = \\sqrt{s^2_t}\n \\,, \\quad\n s^2_t = \\frac1{N-1} \\sum_{s=t-N+1}^t (y_s - m_t)^2\n$$", "_____no_output_____" ] ], [ [ "rcParams['figure.figsize'] = 16, 10", "_____no_output_____" ], [ "def rolling_diagnostics(series, window=500):\n rolling = series.rolling(window)\n\n # Create top and 
bottom plots\n fig = plt.figure()\n ax_top = fig.add_subplot(211, title='Moving average', xlabel='Date', ylabel='value')\n ax_bottom = fig.add_subplot(212, title='Moving standard deviation',\n sharex=ax_top, xlabel='Date', ylabel='std.')\n\n # Plot the graphs\n # series itself and moving average\n rolling.mean().plot(ax=ax_top)\n series.plot(ax=ax_top, color='black', lw=2, alpha=.25, zorder=-10)\n ax_top.grid(which='major', axis='both')\n\n # moving std.\n rolling.std().plot(ax=ax_bottom)\n ax_bottom.grid(which='major', axis='both')\n\n fig.tight_layout()\n return fig", "_____no_output_____" ], [ "rolling_diagnostics(dataset[target_column], window=36);", "_____no_output_____" ] ], [ [ "The graphs show the trend in the dynamics of time series", "_____no_output_____" ], [ "<br>", "_____no_output_____" ], [ "### Rough estimate of seasonality", "_____no_output_____" ], [ "It is disarable to make season normalization relatively to trend.\nLet's discover seasonality, for example monthly", "_____no_output_____" ] ], [ [ "def monthly_seasonality_diagnostics(series, fraction=0.66, period='month'):\n # Use non-parametric local linear regression to preliminary estimate the trend\n trend = sm.api.nonparametric.lowess(series, np.r_[:len(series)],\n frac=fraction, it=5)\n\n # Aggregate by months and calculate average and standard deviation\n by = getattr(series.index, period, 'month')\n season_groupby = (series - trend[:, 1]).groupby(by)\n seas_mean, seas_std = season_groupby.mean(), season_groupby.std()\n\n # Create subplots\n fig = plt.figure()\n ax_top = fig.add_subplot(211, title='Trend', xlabel='Date')\n ax_bottom = fig.add_subplot(212, title='Seasonality', xlabel=period)\n\n # Plot the graphs\n # The series and the trend\n pd.Series(trend[:, 1], index=series.index).plot(ax=ax_top)\n series.plot(ax=ax_top, color=\"black\", lw=2, alpha=.25, zorder=-10)\n ax_top.grid(which=\"major\", axis=\"both\")\n\n # Seasonality and 90% normal confidence interval\n ax_bottom.plot(1 + 
np.r_[:len(seas_mean)], seas_mean, lw=2)\n ax_bottom.fill_between(1 + np.r_[:len(seas_mean)],\n seas_mean - 1.96 * seas_std,\n seas_mean + 1.96 * seas_std,\n zorder=-10, color=\"C1\", alpha=0.15)\n ax_bottom.grid(which=\"major\", axis=\"both\")\n\n\n fig.tight_layout()\n return fig", "_____no_output_____" ], [ "monthly_seasonality_diagnostics(dataset[target_column], fraction=0.33, period='month');", "_____no_output_____" ] ], [ [ "The graph shows the **monthly** seasonality in the dynamics", "_____no_output_____" ] ], [ [ "## TODO: check visually if there is weekly seasonality\n\n", "_____no_output_____" ] ], [ [ "<br>", "_____no_output_____" ], [ "### Total vs. partial autocorrelations", "_____no_output_____" ], [ "The functions estimate influence of observation of $h$ steps (_lags_) on the current observation, but they does it differently\n* **total autocorrelation** $\\rho_h$\n * shows cumulative impact $y_{t-h}$ to $y_t$ **via** influence on all intermediate $y_{t-j}$, $j=1,\\,...,\\,h-1$\n* **partial autocorrelation** $\\phi_h$\n * shows **net** (pure) impract $y_{t-h}$ to $y_t$ **excluding** influence on all intermediate $y_{t-j}$, $j=1,\\,...,\\,h-1$", "_____no_output_____" ] ], [ [ "from statsmodels.tsa.stattools import acf, pacf", "_____no_output_____" ], [ "from statsmodels.graphics.tsaplots import plot_acf, plot_pacf\n\n\ndef correlation_diagnostics(series, lags=60):\n fig = plt.figure(figsize=(20, 6))\n ax_left, ax_right = fig.subplots(\n nrows=1, ncols=2, sharey=True, sharex=True,\n subplot_kw={'xlabel': 'lag', 'ylim': (-1.1, 1.1)})\n\n # Use intrinsic statsmodels functions\n plot_acf(series, ax_left, lags=lags, zero=False, alpha=0.05,\n title='Sample autocorrelation', marker=None)\n\n plot_pacf(series, ax_right, lags=lags, zero=False, alpha=0.05,\n title='Sample partial autocorrelation', marker=None)\n\n fig.tight_layout()\n return fig", "_____no_output_____" ] ], [ [ "Let's explore sample autocorrelations of the series", "_____no_output_____" ] ], [ 
[ "correlation_diagnostics(dataset[target_column], lags=250);", "_____no_output_____" ] ], [ [ "* On the **left plot** autocorrelation of small lags is near to $1.0$ and decreases pretty slowly\n* On the **right plot** observations with lag $1$, $110$, $215$ has statistically non-null net effect\n\nIt is indication of very typical kind of non-stationarity: $y_t = y_{t-1} + \\ldots$.\n\nThat means it is observed strong dependance of the past (the history of a process).", "_____no_output_____" ], [ "---", "_____no_output_____" ], [ "# Key steps of model construction for time series\n\n* Stationarize a time series\n* Estimate parameter of the model\n* Visualize remains after stationarization\n * check if respect the model requirements\n* Validation of the model", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ] ]
4aa457bcc2529a07c81de1c329ff2b39e5d306de
125,389
ipynb
Jupyter Notebook
jupyter/projects_individual/project_text-analysis-with-BigARTM_ldinka.ipynb
ivan-magda/mlcourse_open_homeworks
bc67fe6b872655e8e5628ec14b01fde407c5eb3c
[ "MIT" ]
1
2018-10-24T08:35:29.000Z
2018-10-24T08:35:29.000Z
jupyter/projects_individual/project_text-analysis-with-BigARTM_ldinka.ipynb
ivan-magda/mlcourse_open_homeworks
bc67fe6b872655e8e5628ec14b01fde407c5eb3c
[ "MIT" ]
null
null
null
jupyter/projects_individual/project_text-analysis-with-BigARTM_ldinka.ipynb
ivan-magda/mlcourse_open_homeworks
bc67fe6b872655e8e5628ec14b01fde407c5eb3c
[ "MIT" ]
3
2019-10-03T22:32:24.000Z
2021-01-13T10:09:22.000Z
91.05955
37,980
0.79893
[ [ [ "<center>\n<img src=\"../../img/ods_stickers.jpg\">\n## Открытый курс по машинному обучению\n<center>Автор материала: Ефремова Дина (@ldinka).", "_____no_output_____" ], [ "# <center>Исследование возможностей BigARTM</center>\n\n## <center>Тематическое моделирование с помощью BigARTM</center>", "_____no_output_____" ], [ "#### Интро", "_____no_output_____" ], [ "BigARTM — библиотека, предназначенная для тематической категоризации текстов; делает разбиение на темы без «учителя».\n\nЯ собираюсь использовать эту библиотеку для собственных нужд в будущем, но так как она не предназначена для обучения с учителем, решила, что для начала ее стоит протестировать на какой-нибудь уже размеченной выборке. Для этих целей был использован датасет \"20 news groups\".\n\nИдея экперимента такова:\n- делим выборку на обучающую и тестовую;\n- обучаем модель на обучающей выборке;\n- «подгоняем» выделенные темы под действительные;\n- смотрим, насколько хорошо прошло разбиение;\n- тестируем модель на тестовой выборке.", "_____no_output_____" ], [ "#### Поехали!", "_____no_output_____" ], [ "**Внимание!** Данный проект был реализован с помощью Python 3.6 и BigARTM 0.9.0. 
Методы, рассмотренные здесь, могут отличаться от методов в других версиях библиотеки.", "_____no_output_____" ], [ "<img src=\"../../img/bigartm_logo.png\"/>", "_____no_output_____" ], [ "### <font color=\"lightgrey\">Не</font>множко теории", "_____no_output_____" ], [ "У нас есть словарь терминов $W = \\{w \\in W\\}$, который представляет из себя мешок слов, биграмм или n-грамм;\n\nЕсть коллекция документов $D = \\{d \\in D\\}$, где $d \\subset W$;\n\nЕсть известное множество тем $T = \\{t \\in T\\}$;\n\n$n_{dw}$ — сколько раз термин $w$ встретился в документе $d$;\n\n$n_{d}$ — длина документа $d$.", "_____no_output_____" ], [ "Мы считаем, что существует матрица $\\Phi$ распределения терминов $w$ в темах $t$: (фи) $\\Phi = (\\phi_{wt})$\n\nи матрица распределения тем $t$ в документах $d$: (тета) $\\Theta = (\\theta_{td})$,\n\nпереумножение которых дает нам тематическую модель, или, другими словами, представление наблюдаемого условного распределения $p(w|d)$ терминов $w$ в документах $d$ коллекции $D$:\n\n<center>$\\large p(w|d) = \\Phi \\Theta$</center>\n\n<center>$$\\large p(w|d) = \\sum_{t \\in T} \\phi_{wt} \\theta_{td}$$</center>\n\nгде $\\phi_{wt} = p(w|t)$ — вероятности терминов $w$ в каждой теме $t$\n\nи $\\theta_{td} = p(t|d)$ — вероятности тем $t$ в каждом документе $d$.", "_____no_output_____" ], [ "<img src=\"../../img/phi_theta.png\"/>", "_____no_output_____" ], [ "Нам известны наблюдаемые частоты терминов в документах, это:\n\n<center>$ \\large \\hat{p}(w|d) = \\frac {n_{dw}} {n_{d}} $</center>", "_____no_output_____" ], [ "Таким образом, наша задача тематического моделирования становится задачей стохастического матричного разложения матрицы $\\hat{p}(w|d)$ на стохастические матрицы $\\Phi$ и $\\Theta$.\n\nНапомню, что матрица является стохастической, если каждый ее столбец представляет дискретное распределение вероятностей, сумма значений каждого столбца равна 1.", "_____no_output_____" ], [ "Воспользовавшись принципом максимального правдоподобия, т. 
е. максимизируя логарифм правдоподобия, мы получим:\n\n<center>$\n\\begin{cases}\n\\sum_{d \\in D} \\sum_{w \\in d} n_{dw} \\ln \\sum_{t \\in T} \\phi_{wt} \\theta_{td} \\rightarrow \\max\\limits_{\\Phi,\\Theta};\\\\\n\\sum_{w \\in W} \\phi_{wt} = 1, \\qquad \\phi_{wt}\\geq0;\\\\\n\\sum_{t \\in T} \\theta_{td} = 1, \\quad\\quad\\;\\; \\theta_{td}\\geq0.\n\\end{cases}\n$</center>", "_____no_output_____" ], [ "Чтобы из множества решений выбрать наиболее подходящее, введем критерий регуляризации $R(\\Phi, \\Theta)$:\n\n<center>$\n\\begin{cases}\n\\sum_{d \\in D} \\sum_{w \\in d} n_{dw} \\ln \\sum_{t \\in T} \\phi_{wt} \\theta_{td} + R(\\Phi, \\Theta) \\rightarrow \\max\\limits_{\\Phi,\\Theta};\\\\\n\\sum_{w \\in W} \\phi_{wt} = 1, \\qquad \\phi_{wt}\\geq0;\\\\\n\\sum_{t \\in T} \\theta_{td} = 1, \\quad\\quad\\;\\; \\theta_{td}\\geq0.\n\\end{cases}\n$</center>", "_____no_output_____" ], [ "Два наиболее известных частных случая этой системы уравнений:\n- **PLSA**, вероятностный латентный семантический анализ, когда $R(\\Phi, \\Theta) = 0$\n- **LDA**, латентное размещение Дирихле:\n$$R(\\Phi, \\Theta) = \\sum_{t,w} (\\beta_{w} - 1) \\ln \\phi_{wt} + \\sum_{d,t} (\\alpha_{t} - 1) \\ln \\theta_{td} $$\nгде $\\beta_{w} > 0$, $\\alpha_{t} > 0$ — параметры регуляризатора.", "_____no_output_____" ], [ "Однако оказывается запас неединственности решения настолько большой, что на модель можно накладывать сразу несколько ограничений, такой подход называется **ARTM**, или аддитивной регуляризацией тематических моделей:\n\n<center>$\n\\begin{cases}\n\\sum_{d,w} n_{dw} \\ln \\sum_{t} \\phi_{wt} \\theta_{td} + \\sum_{i=1}^k \\tau_{i} R_{i}(\\Phi, \\Theta) \\rightarrow \\max\\limits_{\\Phi,\\Theta};\\\\\n\\sum_{w \\in W} \\phi_{wt} = 1, \\qquad \\phi_{wt}\\geq0;\\\\\n\\sum_{t \\in T} \\theta_{td} = 1, \\quad\\quad\\;\\; \\theta_{td}\\geq0.\n\\end{cases}\n$</center>\n\nгде $\\tau_{i}$ — коэффициенты регуляризации.", "_____no_output_____" ], [ "Теперь давайте познакомимся с библиотекой 
BigARTM и разберем еще некоторые аспекты тематического моделирования на ходу.", "_____no_output_____" ], [ "Если Вас очень сильно заинтересовала теоретическая часть категоризации текстов и тематического моделирования, рекомендую посмотреть видеолекции из курса Яндекса на Coursera «Поиск структуры в данных» четвертой недели: <a href=\"https://www.coursera.org/learn/unsupervised-learning/home/week/4\">Тематическое моделирование</a>.", "_____no_output_____" ], [ "### BigARTM", "_____no_output_____" ], [ "#### Установка", "_____no_output_____" ], [ "Естественно, для начала работы с библиотекой ее надо установить. Вот несколько видео, которые рассказывают, как это сделать в зависимости от вашей операционной системы:\n- <a href=\"https://www.coursera.org/learn/unsupervised-learning/lecture/qmsFm/ustanovka-bigartm-v-windows\">Установка BigARTM в Windows</a>\n- <a href=\"https://www.coursera.org/learn/unsupervised-learning/lecture/zPyO0/ustanovka-bigartm-v-linux-mint\">Установка BigARTM в Linux</a>\n- <a href=\"https://www.coursera.org/learn/unsupervised-learning/lecture/nuIhL/ustanovka-bigartm-v-mac-os-x\">Установка BigARTM в Mac OS X</a>\n\nЛибо можно воспользоваться инструкцией с официального сайта, которая, скорее всего, будет гораздо актуальнее: <a href=\"https://bigartm.readthedocs.io/en/stable/installation/index.html\">здесь</a>. 
Там же указано, как можно установить BigARTM в качестве <a href=\"https://bigartm.readthedocs.io/en/stable/installation/docker.html\">Docker-контейнера</a>.", "_____no_output_____" ], [ "#### Использование BigARTM", "_____no_output_____" ] ], [ [ "import artm\nimport re\nimport numpy as np\nimport seaborn as sns; sns.set()\n\nfrom sklearn.metrics import classification_report, confusion_matrix\nfrom sklearn.preprocessing import normalize\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import accuracy_score\nfrom matplotlib import pyplot as plt\n%matplotlib inline", "_____no_output_____" ], [ "artm.version()", "_____no_output_____" ] ], [ [ "Скачаем датасет ***the 20 news groups*** с заранее известным количеством категорий новостей:", "_____no_output_____" ] ], [ [ "from sklearn.datasets import fetch_20newsgroups", "_____no_output_____" ], [ "newsgroups = fetch_20newsgroups('../../data/news_data')", "_____no_output_____" ], [ "newsgroups['target_names']", "_____no_output_____" ] ], [ [ "Приведем данные к формату *Vowpal Wabbit*. 
Так как BigARTM не рассчитан на обучение с учителем, то мы поступим следующим образом:\n- обучим модель на всем корпусе текстов;\n- выделим ключевые слова тем и по ним определим, к какой теме они скорее всего относятся;\n- сравним наши полученные результаты разбиения с истинными значенями.", "_____no_output_____" ] ], [ [ "TEXT_FIELD = \"text\"", "_____no_output_____" ], [ "def to_vw_format(document, label=None):\n return str(label or '0') + ' |' + TEXT_FIELD + ' ' + ' '.join(re.findall('\\w{3,}', document.lower())) + '\\n'", "<input>:2: DeprecationWarning: invalid escape sequence \\w\n<ipython-input-643-9a4f2d073677>:2: DeprecationWarning: invalid escape sequence \\w\n return str(label or '0') + ' |' + TEXT_FIELD + ' ' + ' '.join(re.findall('\\w{3,}', document.lower())) + '\\n'\n" ], [ "all_documents = newsgroups['data']\nall_targets = newsgroups['target']\nlen(newsgroups['target'])", "_____no_output_____" ], [ "train_documents, test_documents, train_labels, test_labels = \\\n train_test_split(all_documents, all_targets, random_state=7)\n\nwith open('../../data/news_data/20news_train_mult.vw', 'w') as vw_train_data:\n for text, target in zip(train_documents, train_labels):\n vw_train_data.write(to_vw_format(text, target))\nwith open('../../data/news_data/20news_test_mult.vw', 'w') as vw_test_data:\n for text in test_documents:\n vw_test_data.write(to_vw_format(text))", "_____no_output_____" ] ], [ [ "Загрузим данные в необходимый для BigARTM формат:", "_____no_output_____" ] ], [ [ "batch_vectorizer = artm.BatchVectorizer(data_path=\"../../data/news_data/20news_train_mult.vw\",\n data_format=\"vowpal_wabbit\",\n target_folder=\"news_batches\")", "_____no_output_____" ] ], [ [ "Данные в BigARTM загружаются порционно, укажем в \n- *data_path* путь к обучающей выборке,\n- *data_format* — формат наших данных, может быть:\n * *bow_n_wd* — это вектор $n_{wd}$ в виду массива *numpy.ndarray*, также необходимо передать соответствующий словарь терминов, где ключ — это 
индекс вектора *numpy.ndarray* $n_{wd}$, а значение — соответствующий токен.\n ```python\n batch_vectorizer = artm.BatchVectorizer(data_format='bow_n_wd',\n n_wd=n_wd,\n vocabulary=vocabulary)\n ```\n * *vowpal_wabbit* — формат Vowpal Wabbit;\n * *bow_uci* — UCI формат (например, с *vocab.my_collection.txt* и *docword.my_collection.txt* файлами):\n ```python\n batch_vectorizer = artm.BatchVectorizer(data_path='',\n data_format='bow_uci',\n collection_name='my_collection',\n target_folder='my_collection_batches')\n ```\n * *batches* — данные, уже сконверченные в батчи с помощью BigARTM;\n- *target_folder* — путь для сохранения батчей.\n\nПока это все параметры, что нам нужны для загрузки наших данных.\n\nПосле того, как BigARTM создал батчи из данных, можно использовать их для загрузки:", "_____no_output_____" ] ], [ [ "batch_vectorizer = artm.BatchVectorizer(data_path=\"news_batches\", data_format='batches')", "_____no_output_____" ] ], [ [ "Инициируем модель с известным нам количеством тем. Количество тем — это гиперпараметр, поэтому если он заранее нам неизвестен, то его необходимо настраивать, т. е. брать такое количество тем, при котором разбиение кажется наиболее удачным.\n\n**Важно!** У нас 20 предметных тем, однако некоторые из них довольно узкоспециализированны и смежны, как например 'comp.os.ms-windows.misc' и 'comp.windows.x', или 'comp.sys.ibm.pc.hardware' и 'comp.sys.mac.hardware', тогда как другие размыты и всеобъемлющи: talk.politics.misc' и 'talk.religion.misc'.\n\nСкорее всего, нам не удастся в чистом виде выделить все 20 тем — некоторые из них окажутся слитными, а другие наоборот раздробятся на более мелкие. Поэтому мы попробуем построить 40 «предметных» тем и одну фоновую. Чем больше вы будем строить категорий, тем лучше мы сможем подстроиться под данные, однако это довольно трудоемкое занятие сидеть потом и распределять в получившиеся темы по реальным категориям (<strike>я правда очень-очень задолбалась!</strike>).\n\nЗачем нужны фоновые темы? 
Дело в том, что наличие общей лексики в темах приводит к плохой ее интерпретируемости. Выделив общую лексику в отдельную тему, мы сильно снизим ее количество в предметных темах, таким образом оставив там лексическое ядро, т. е. ключевые слова, которые данную тему характеризуют. Также этим преобразованием мы снизим коррелированность тем, они станут более независимыми и различимыми.", "_____no_output_____" ] ], [ [ "T = 41\nmodel_artm = artm.ARTM(num_topics=T,\n topic_names=[str(i) for i in range(T)],\n class_ids={TEXT_FIELD:1}, \n num_document_passes=1,\n reuse_theta=True,\n cache_theta=True,\n seed=4)", "_____no_output_____" ] ], [ [ "Передаем в модель следующие параметры:\n- *num_topics* — количество тем;\n- *topic_names* — названия тем;\n- *class_ids* — название модальности и ее вес. Дело в том, что кроме самих текстов, в данных может содержаться такая информация, как автор, изображения, ссылки на другие документы и т. д., по которым также можно обучать модель;\n- *num_document_passes* — количество проходов при обучении модели;\n- *reuse_theta* — переиспользовать ли матрицу $\\Theta$ с предыдущей итерации;\n- *cache_theta* — сохранить ли матрицу $\\Theta$ в модели, чтобы в дальнейшем ее использовать.\n\nДалее необходимо создать словарь; передадим ему какое-нибудь название, которое будем использовать в будущем для работы с этим словарем.", "_____no_output_____" ] ], [ [ "DICTIONARY_NAME = 'dictionary'\n\ndictionary = artm.Dictionary(DICTIONARY_NAME)\ndictionary.gather(batch_vectorizer.data_path)", "_____no_output_____" ] ], [ [ "Инициализируем модель с тем именем словаря, что мы передали выше, можно зафиксировать *random seed* для вопроизводимости результатов:", "_____no_output_____" ] ], [ [ "np.random.seed(1)\nmodel_artm.initialize(DICTIONARY_NAME)", "_____no_output_____" ] ], [ [ "Добавим к модели несколько метрик:\n- перплексию (*PerplexityScore*), чтобы индентифицировать сходимость модели\n * Перплексия — это известная в вычислительной лингвистике мера 
качества модели языка. Можно сказать, что это мера неопределенности или различности слов в тексте.\n- специальный *score* ключевых слов (*TopTokensScore*), чтобы в дальнейшем мы могли идентифицировать по ним наши тематики;\n- разреженность матрицы $\\Phi$ (*SparsityPhiScore*);\n- разреженность матрицы $\\Theta$ (*SparsityThetaScore*).", "_____no_output_____" ] ], [ [ "model_artm.scores.add(artm.PerplexityScore(name='perplexity_score',\n dictionary=DICTIONARY_NAME))\nmodel_artm.scores.add(artm.SparsityPhiScore(name='sparsity_phi_score', class_id=\"text\"))\nmodel_artm.scores.add(artm.SparsityThetaScore(name='sparsity_theta_score'))\nmodel_artm.scores.add(artm.TopTokensScore(name=\"top_words\", num_tokens=15, class_id=TEXT_FIELD))", "_____no_output_____" ] ], [ [ "Следующая операция *fit_offline* займет некоторое время, мы будем обучать модель в режиме *offline* в 40 проходов. Количество проходов влияет на сходимость модели: чем их больше, тем лучше сходится модель.", "_____no_output_____" ] ], [ [ "%%time\n\nmodel_artm.fit_offline(batch_vectorizer=batch_vectorizer, num_collection_passes=40)", "CPU times: user 3min 47s, sys: 14.9 s, total: 4min 2s\nWall time: 1min 49s\n" ] ], [ [ "Построим график сходимости модели и увидим, что модель сходится довольно быстро:", "_____no_output_____" ] ], [ [ "plt.plot(model_artm.score_tracker[\"perplexity_score\"].value);", "_____no_output_____" ] ], [ [ "Выведем значения разреженности матриц:", "_____no_output_____" ] ], [ [ "print('Phi', model_artm.score_tracker[\"sparsity_phi_score\"].last_value)\nprint('Theta', model_artm.score_tracker[\"sparsity_theta_score\"].last_value)", "Phi 0.9166713356971741\nTheta 0.14179398119449615\n" ] ], [ [ "После того, как модель сошлась, добавим к ней регуляризаторы. Для начала сглаживающий регуляризатор — это *SmoothSparsePhiRegularizer* с большим положительным коэффициентом $\\tau$, который нужно применить только к фоновой теме, чтобы выделить в нее как можно больше общей лексики. 
Пусть тема с последним индексом будет фоновой, передадим в *topic_names* этот индекс:", "_____no_output_____" ] ], [ [ "model_artm.regularizers.add(artm.SmoothSparsePhiRegularizer(name='SparsePhi', \n tau=1e5, \n dictionary=dictionary, \n class_ids=TEXT_FIELD,\n topic_names=str(T-1)))", "_____no_output_____" ] ], [ [ "Дообучим модель, сделав 20 проходов по ней с новым регуляризатором:", "_____no_output_____" ] ], [ [ "%%time\n\nmodel_artm.fit_offline(batch_vectorizer=batch_vectorizer, num_collection_passes=20)", "CPU times: user 2min 2s, sys: 8.44 s, total: 2min 11s\nWall time: 1min 3s\n" ] ], [ [ "Выведем значения разреженности матриц, заметим, что значение для $\\Theta$ немного увеличилось:", "_____no_output_____" ] ], [ [ "print('Phi', model_artm.score_tracker[\"sparsity_phi_score\"].last_value)\nprint('Theta', model_artm.score_tracker[\"sparsity_theta_score\"].last_value)", "Phi 0.9079725742340088\nTheta 0.25141066312789917\n" ] ], [ [ "Теперь добавим к модели разреживающий регуляризатор, это тот же *SmoothSparsePhiRegularizer* резуляризатор, только с отрицательным значением $\\tau$ и примененный ко всем предметным темам:", "_____no_output_____" ] ], [ [ "model_artm.regularizers.add(artm.SmoothSparsePhiRegularizer(name='SparsePhi2', \n tau=-5e5, \n dictionary=dictionary, \n class_ids=TEXT_FIELD,\n topic_names=[str(i) for i in range(T-1)]),\n overwrite=True)", "_____no_output_____" ], [ "%%time\n\nmodel_artm.fit_offline(batch_vectorizer=batch_vectorizer, num_collection_passes=20)", "CPU times: user 2min 2s, sys: 8.46 s, total: 2min 10s\nWall time: 1min 2s\n" ] ], [ [ "Видим, что значения разреженности увеличились еще больше:", "_____no_output_____" ] ], [ [ "print(model_artm.score_tracker[\"sparsity_phi_score\"].last_value)\nprint(model_artm.score_tracker[\"sparsity_theta_score\"].last_value)", "0.9571115374565125\n0.900256335735321\n" ] ], [ [ "Посмотрим, сколько категорий-строк матрицы $\\Theta$ после регуляризации осталось, т. е. не занулилось/выродилось. 
И это одна категория:", "_____no_output_____" ] ], [ [ "len(model_artm.score_tracker[\"top_words\"].last_tokens.keys())", "_____no_output_____" ] ], [ [ "Теперь выведем ключевые слова тем, чтобы определить, каким образом прошло разбиение, и сделать соответствие с нашим начальным списком тем:", "_____no_output_____" ] ], [ [ "for topic_name in model_artm.score_tracker[\"top_words\"].last_tokens.keys():\n tokens = model_artm.score_tracker[\"top_words\"].last_tokens\n res_str = topic_name + ': ' + ', '.join(tokens[topic_name])\n print(res_str)", "0: adaptec, aspi4dos, 1542, dma, scsiha, cdrom, fielder, fulk, acne, buffering, qemm, gaetti, inode, zeos, 1542a\n1: shagen, istanbul, igor, emma, salonica, aiu, osmanli, ankara, ermeni, donot, nun, flax, umumiye, nezareti, mecmuasi\n2: ground, wire, circuit, wiring, radar, audio, voltage, neutral, electronics, amp, detector, outlets, outlet, circuits, electrical\n3: team, hockey, play, leafs, flyers, period, detroit, fans, cup, montreal, wings, rangers, goal, playoffs, played\n4: vitamin, infected, carcinogenic, candida, kidney, oxalic, clubbing, theseus, sasghm, oxalate, unx, tray, transgenic, nutrition, magnesium\n5: bike, dod, ride, motorcycle, bikes, bmw, riding, rider, helmet, dog, motorcycles, bnr, behanna, harley, ama\n6: tiff, den, tobacco, os2, smokeless, sphere, 3do, coli, lyme, o157, quicktime, photography, radius, jpeg, thrush\n7: pts, bos, tor, det, nyr, chi, nyi, pit, jets, que, phi, ott, edm, har, cal\n8: israel, jews, turkish, armenian, israeli, armenians, turks, armenia, turkey, killed, serdar, jewish, argic, arab, greek\n9: weiss, tyre, roehm, mob, papa, shouting, theater, bandits, landreneau, waddles, plaintiffs, inductive, yehuda, shomron, ambulance\n10: engine, boyle, cactus, torque, automotive, integra, exhaust, welty, volvo, diesel, balltown, clutch, brake, tires, rally\n11: graphics, image, files, images, format, pub, package, font, 128, processing, tar, fonts, virtual, animation, zip\n12: astronaut, 
econ, wip, fls, shea, acad, eder, pilot, tiger, 616, derby, dani, csc2imd, candidates, lupica\n13: eternal, homosexuality, salvation, deity, enviroleague, archbishop, schism, senses, scrolls, ezekiel, gerry, ahmadiyya, hfsi, godhead, schismatic\n14: downs, mating, graphite, baeyer, gamet, guncer, buckminster, postulates, ntfs, ruckman, anik, dlb, rapist, floggings, religiously\n15: intercon, amanda, larson, k_p, phones, u_c, precision, clarinet, pmetzger, physicist, caronni, unified, templeton, sug, metzger\n16: window, motif, server, x11r5, xterm, myers, widget, x11, xlib, contrib, lcs, x11r4, rockefeller, echo, export\n17: sleeve, promo, jazz, capitol, bitzm, adlib, mccartney, coupons, joltes, goucher, packaging, dsu, litana, dina, obninsk\n18: engines, egalon, dietz, ke4zv, sirtf, het, een, balloon, oliveira, sunrise, sunset, claudio, ozone, luna, worden\n19: key, encryption, clipper, chip, privacy, security, keys, escrow, des, nsa, secure, algorithm, crypto, anonymous, pgp\n20: sandvik, objective, kent, koresh, newton, biblical, malcolm, magi, kendig, ksand, alink, royalroads, mormons, commandments, hudson\n21: atheists, religion, atheism, keith, islam, morality, religious, moral, islamic, atheist, livesey, caltech, mathew, solntze, wpd\n22: windows, dos, disk, mac, rom, nubus, borland, risc, adapter, desktop, slot, feature, cica, paradox, ethernet\n23: allah, kryptonite, hussein, marital, lucio, crutches, fasad, mischief, kuwait, macalstr, bicycle, shatim, acooper, vaughan, ww2\n24: msg, pitt, banks, geb, gordon, medical, disease, patients, health, food, cancer, pain, aids, hiv, doctor\n25: 6ei, 75u, s0g, 0el, 0em, 24e, 6um, sc_, p45, scx, rlk, s0t, _lw, m45, 6umu\n26: god, jesus, christian, christians, bible, church, christ, faith, christianity, truth, sin, rutgers, belief, lord, catholic\n27: gun, guns, firearms, weapons, fbi, batf, fire, handgun, atf, criminals, rkba, firearm, assault, nra, roby\n28: khalsa, jdmooney, tquinn, heartland, wrangler, kocrsv01, 
delcoelect, mooney, csuohio, cutlass, collisions, tick, namao, jcksnste, acf1\n29: car, cars, callison, autos, ford, uoknor, sho, uokmax, trunk, chevy, aftermarket, mph, drain, probe, buick\n30: greece, nazis, forged, henrik, erzurum, amehdi, arabs, honeywell, hojali, istanbul, struggle, interim, helicopter, sadikov, camps\n31: baseball, players, games, cubs, braves, pitching, runs, phillies, alomar, sox, roger, mets, hit, hitter, morris\n32: lds, caligiuri, taoism, mcconkie, mormon, reuss, teaches, lexicon, beast, masonic, freemasonry, baptist, psyrobtw, commandment, masonry\n33: president, stephanopoulos, clinton, cramer, optilink, tax, jobs, drugs, gay, clayton, insurance, sexual, secretary, health, republicans\n34: nhl, season, league, teams, ahl, sharks, winnipeg, draft, edmonton, hartford, defenseman, tampa, oilers, bay, canucks\n35: drive, scsi, apple, card, drives, monitor, ide, controller, bus, ram, bios, modem, simms, floppy, quadra\n36: sale, shipping, offer, condition, forsale, wolverine, obo, excellent, cover, 1st, hulk, hiram, comics, ghost, lens\n37: space, nasa, launch, orbit, moon, shuttle, satellite, lunar, henry, earth, jpl, alaska, digex, spacecraft, mission\n38: militia, crime, stratus, waco, cdt, handguns, compound, homicide, constitution, homicides, veal, deaths, amendment, arms, utkvm1\n39: centerline, toyota, frost, wagon, convertible, jimf, blah, taurus, dumbest, mustang, dodge, mileage, injector, pockets, odometer\n40: the, and, that, for, you, from, edu, this, are, not, have, with, was, but, they\n" ] ], [ [ "Далее мы будем подгонять разбиение под действительные темы с помощью *confusion matrix*.", "_____no_output_____" ] ], [ [ "target_dict = {\n 'alt.atheism': 0,\n 'comp.graphics': 1,\n 'comp.os.ms-windows.misc': 2,\n 'comp.sys.ibm.pc.hardware': 3,\n 'comp.sys.mac.hardware': 4,\n 'comp.windows.x': 5,\n 'misc.forsale': 6,\n 'rec.autos': 7,\n 'rec.motorcycles': 8,\n 'rec.sport.baseball': 9,\n 'rec.sport.hockey': 10,\n 'sci.crypt': 11,\n 
'sci.electronics': 12,\n 'sci.med': 13,\n 'sci.space': 14,\n 'soc.religion.christian': 15,\n 'talk.politics.guns': 16,\n 'talk.politics.mideast': 17,\n 'talk.politics.misc': 18,\n 'talk.religion.misc': 19\n}", "_____no_output_____" ], [ "mixed = [\n 'comp.sys.ibm.pc.hardware',\n 'talk.politics.mideast',\n 'sci.electronics',\n 'rec.sport.hockey',\n\n 'sci.med',\n 'rec.motorcycles',\n 'comp.graphics',\n 'rec.sport.hockey',\n\n 'talk.politics.mideast',\n 'talk.religion.misc',\n 'rec.autos',\n 'comp.graphics',\n\n 'sci.space',\n 'soc.religion.christian',\n 'comp.os.ms-windows.misc',\n 'sci.crypt',\n\n 'comp.windows.x',\n 'misc.forsale',\n 'sci.space',\n 'sci.crypt',\n\n 'talk.religion.misc',\n 'alt.atheism',\n 'comp.os.ms-windows.misc',\n 'alt.atheism',\n \n 'sci.med',\n 'comp.os.ms-windows.misc',\n 'soc.religion.christian',\n 'talk.politics.guns',\n\n 'rec.autos',\n 'rec.autos',\n 'talk.politics.mideast',\n 'rec.sport.baseball',\n\n 'talk.religion.misc',\n 'talk.politics.misc',\n 'rec.sport.hockey',\n 'comp.sys.mac.hardware',\n\n 'misc.forsale',\n 'sci.space',\n 'talk.politics.guns',\n 'rec.autos',\n \n '-'\n]", "_____no_output_____" ] ], [ [ "Построим небольшой отчет о правильности нашего разбиения:", "_____no_output_____" ] ], [ [ "theta_train = model_artm.get_theta()\nmodel_labels = []\nkeys = np.sort([int(i) for i in theta_train.keys()])\nfor i in keys:\n max_val = 0\n max_idx = 0\n for j in theta_train[i].keys():\n if j == str(T-1):\n continue\n if theta_train[i][j] > max_val:\n max_val = theta_train[i][j]\n max_idx = j\n topic = mixed[int(max_idx)]\n if topic == '-':\n print(i, '-')\n label = target_dict[topic]\n model_labels.append(label)", "_____no_output_____" ], [ "print(classification_report(train_labels, model_labels))", " precision recall f1-score support\n\n 0 0.92 0.93 0.92 360\n 1 0.56 0.99 0.71 436\n 2 0.64 0.53 0.58 439\n 3 0.44 0.14 0.22 458\n 4 0.40 0.66 0.50 430\n 5 0.82 0.44 0.57 450\n 6 0.86 0.71 0.78 449\n 7 0.77 0.92 0.84 465\n 8 0.95 0.91 
0.93 440\n 9 0.99 0.89 0.94 452\n 10 0.95 0.99 0.97 446\n 11 0.88 1.00 0.93 437\n 12 0.81 0.90 0.85 439\n 13 0.95 0.65 0.77 427\n 14 0.85 0.93 0.89 446\n 15 0.86 0.98 0.92 440\n 16 0.92 1.00 0.96 420\n 17 0.98 0.99 0.98 406\n 18 0.99 0.76 0.86 353\n 19 0.80 0.75 0.77 292\n\navg / total 0.81 0.80 0.79 8485\n\n" ], [ "print(classification_report(train_labels, model_labels))", " precision recall f1-score support\n\n 0 0.92 0.93 0.92 360\n 1 0.56 0.99 0.71 436\n 2 0.64 0.53 0.58 439\n 3 0.44 0.14 0.22 458\n 4 0.40 0.66 0.50 430\n 5 0.82 0.44 0.57 450\n 6 0.86 0.71 0.78 449\n 7 0.77 0.92 0.84 465\n 8 0.95 0.91 0.93 440\n 9 0.99 0.89 0.94 452\n 10 0.95 0.99 0.97 446\n 11 0.88 1.00 0.93 437\n 12 0.81 0.90 0.85 439\n 13 0.95 0.65 0.77 427\n 14 0.85 0.93 0.89 446\n 15 0.86 0.98 0.92 440\n 16 0.92 1.00 0.96 420\n 17 0.98 0.99 0.98 406\n 18 0.99 0.76 0.86 353\n 19 0.80 0.75 0.77 292\n\navg / total 0.81 0.80 0.79 8485\n\n" ], [ "mat = confusion_matrix(train_labels, model_labels)\nsns.heatmap(mat.T, annot=True, fmt='d', cbar=False)\nplt.xlabel('True label')\nplt.ylabel('Predicted label');", "/Users/ldinka/miniconda2/envs/py36/lib/python3.6/site-packages/seaborn/matrix.py:143: DeprecationWarning: elementwise == comparison failed; this will raise an error in the future.\n if xticklabels == []:\n" ], [ "accuracy_score(train_labels, model_labels)", "_____no_output_____" ] ], [ [ "Нам удалось добиться 80% *accuracy*. 
По матрице ответов мы видим, что для модели темы *comp.sys.ibm.pc.hardware* и *comp.sys.mac.hardware* практически не различимы (<strike>честно говоря, для меня тоже</strike>), в остальном все более или менее прилично.\n\nПроверим модель на тестовой выборке:", "_____no_output_____" ] ], [ [ "batch_vectorizer_test = artm.BatchVectorizer(data_path=\"../../data/news_data/20news_test_mult.vw\",\n data_format=\"vowpal_wabbit\",\n target_folder=\"news_batches_test\")", "_____no_output_____" ], [ "theta_test = model_artm.transform(batch_vectorizer_test)", "_____no_output_____" ], [ "test_score = []\nfor i in range(len(theta_test.keys())):\n max_val = 0\n max_idx = 0\n for j in theta_test[i].keys():\n if j == str(T-1):\n continue\n if theta_test[i][j] > max_val:\n max_val = theta_test[i][j]\n max_idx = j\n topic = mixed[int(max_idx)]\n label = target_dict[topic]\n test_score.append(label)", "_____no_output_____" ], [ "print(classification_report(test_labels, test_score))", " precision recall f1-score support\n\n 0 0.87 0.88 0.88 120\n 1 0.51 0.86 0.64 148\n 2 0.63 0.43 0.52 152\n 3 0.20 0.02 0.03 132\n 4 0.36 0.68 0.47 148\n 5 0.87 0.51 0.64 143\n 6 0.88 0.52 0.65 136\n 7 0.84 0.83 0.84 129\n 8 0.91 0.95 0.93 158\n 9 0.87 0.94 0.91 145\n 10 0.91 0.97 0.94 154\n 11 0.85 0.97 0.91 158\n 12 0.74 0.82 0.78 152\n 13 0.97 0.74 0.84 167\n 14 0.89 0.91 0.90 147\n 15 0.77 0.93 0.84 159\n 16 0.87 0.94 0.90 126\n 17 0.85 0.98 0.91 158\n 18 0.89 0.81 0.85 112\n 19 0.75 0.49 0.60 85\n\navg / total 0.77 0.77 0.76 2829\n\n" ], [ "mat = confusion_matrix(test_labels, test_score)\nsns.heatmap(mat.T, annot=True, fmt='d', cbar=False)\nplt.xlabel('True label')\nplt.ylabel('Predicted label');", "/Users/ldinka/miniconda2/envs/py36/lib/python3.6/site-packages/seaborn/matrix.py:143: DeprecationWarning: elementwise == comparison failed; this will raise an error in the future.\n if xticklabels == []:\n" ], [ "accuracy_score(test_labels, test_score)", "_____no_output_____" ] ], [ [ "Итого почти 77%, 
незначительно хуже, чем на обучающей.", "_____no_output_____" ], [ "**Вывод:** безумно много времени пришлось потратить на подгонку категорий к реальным темам, но в итоге я осталась довольна результатом. Такие смежные темы, как *alt.atheism*/*soc.religion.christian*/*talk.religion.misc* или *talk.politics.guns*/*talk.politics.mideast*/*talk.politics.misc* разделились вполне неплохо. Думаю, что я все-таки попробую использовать BigARTM в будущем для своих <strike>корыстных</strike> целей.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ] ]
4aa45f9e97b07d8a116b06817be4963fe5be2509
55,512
ipynb
Jupyter Notebook
module2-convolutional-neural-networks/LS_DS_432_Convolution_Neural_Networks_Assignment.ipynb
vishnuyar/DS-Unit-4-Sprint-3-Deep-Learning
cc6bf09bc5d88c82ac0af7159f3fbac50491c738
[ "MIT" ]
null
null
null
module2-convolutional-neural-networks/LS_DS_432_Convolution_Neural_Networks_Assignment.ipynb
vishnuyar/DS-Unit-4-Sprint-3-Deep-Learning
cc6bf09bc5d88c82ac0af7159f3fbac50491c738
[ "MIT" ]
null
null
null
module2-convolutional-neural-networks/LS_DS_432_Convolution_Neural_Networks_Assignment.ipynb
vishnuyar/DS-Unit-4-Sprint-3-Deep-Learning
cc6bf09bc5d88c82ac0af7159f3fbac50491c738
[ "MIT" ]
null
null
null
72
316
0.645338
[ [ [ "<img align=\"left\" src=\"https://lever-client-logos.s3.amazonaws.com/864372b1-534c-480e-acd5-9711f850815c-1524247202159.png\" width=200>\n<br></br>\n<br></br>\n\n## *Data Science Unit 4 Sprint 3 Assignment 2*\n# Convolutional Neural Networks (CNNs)", "_____no_output_____" ], [ "# Assignment\n\nLoad a pretrained network from Keras, [ResNet50](https://tfhub.dev/google/imagenet/resnet_v1_50/classification/1) - a 50 layer deep network trained to recognize [1000 objects](https://storage.googleapis.com/download.tensorflow.org/data/ImageNetLabels.txt). Starting usage:\n\n```python\nimport numpy as np\n\nfrom tensorflow.keras.applications.resnet50 import ResNet50\nfrom tensorflow.keras.preprocessing import image\nfrom tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions\n\nResNet50 = ResNet50(weights='imagenet')\nfeatures = model.predict(x)\n\n```\n\nNext you will need to remove the last layer from the ResNet model. Here, we loop over the layers to use the sequential API. There are easier ways to add and remove layers using the Keras functional API, but doing so introduces other complexities. \n\n```python\n# Remote the Last Layer of ResNEt\nResNet50._layers.pop(0)\n\n# Out New Model\nmodel = Sequential()\n\n# Add Pre-trained layers of Old Model to New Model\nfor layer in ResNet50.layers:\n model.add(layer)\n\n# Turn off additional training of ResNet Layers for speed of assignment\nfor layer in model.layers:\n layer.trainable = False\n\n# Add New Output Layer to Model\nmodel.add(Dense(1, activation='sigmoid'))\n```\n\nYour assignment is to apply the transfer learning above to classify images of Mountains (`./data/mountain/*`) and images of forests (`./data/forest/*`). Treat mountains as the postive class (1) and the forest images as the negative (zero). \n\nSteps to complete assignment: \n1. Load in Image Data into numpy arrays (`X`) \n2. Create a `y` for the labels\n3. Train your model with pretrained layers from resnet\n4. 
Report your model's accuracy", "_____no_output_____" ] ], [ [ "import numpy as np\nimport os\n\nfrom tensorflow.keras.applications.resnet50 import ResNet50\nfrom tensorflow.keras.preprocessing import image\nfrom tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions\nfrom tensorflow.keras import datasets\nfrom tensorflow.keras.models import Sequential, Model # <- May Use\nfrom tensorflow.keras.layers import Dense, Conv2D, MaxPooling2D, Flatten,GlobalAveragePooling2D", "_____no_output_____" ], [ "def process_img_path(img_path):\n return image.load_img(img_path, target_size=(224, 224))", "_____no_output_____" ], [ "imagedata = []\nlabeldata = []\nfor label in ['mountain','forest']:\n files = os.listdir(f'./data/{label}')\n for file in files:\n if file[-3:] == 'jpg':\n img_path = f'./data/{label}/{file}'\n img = process_img_path(img_path)\n img_array = image.img_to_array(img)\n imagedata.append(img_array)\n #Label 1 for Mountain and 0 for Forest\n if(label=='mountain'):\n labeldata.append(1)\n else:\n labeldata.append(0)\n ", "_____no_output_____" ], [ "imagedata = np.array(imagedata)\nlabeldata = np.array(labeldata)\nimagedata.shape,labeldata.shape", "_____no_output_____" ], [ "resnet = ResNet50(weights='imagenet', include_top=False)", "/home/ec2-user/anaconda3/envs/tensorflow_p36/lib/python3.6/site-packages/keras_applications/resnet50.py:265: UserWarning: The output shape of `ResNet50(include_top=False)` has been changed since Keras 2.2.0.\n warnings.warn('The output shape of `ResNet50(include_top=False)` '\n" ], [ "def img_contains_banana(img):\n x = image.img_to_array(img)\n x = np.expand_dims(x, axis=0)\n x = preprocess_input(x)\n model = ResNet50(weights='imagenet')\n features = model.predict(x)\n results = decode_predictions(features, top=3)[0]\n print(results)\n for entry in results:\n if entry[1] == 'banana':\n return entry[2]\n return 0.0", "_____no_output_____" ], [ "for layer in resnet.layers:\n layer.trainable = False", 
"_____no_output_____" ], [ "x = resnet.output\nx = GlobalAveragePooling2D()(x) # This layer is a really fancy flatten\nx = Dense(1024, activation='relu')(x)\npredictions = Dense(1, activation='sigmoid')(x)\nmodel = Model(resnet.input, predictions)", "_____no_output_____" ], [ "from sklearn.model_selection import train_test_split", "_____no_output_____" ], [ "X_train,X_test,y_train,y_test = train_test_split(imagedata,labeldata,stratify=labeldata,test_size=0.2)\nX_train.shape,X_test.shape,y_train.shape,y_test.shape", "_____no_output_____" ], [ "model.compile(loss='binary_crossentropy',\n optimizer='adam',\n metrics=['accuracy'])", "W1112 21:14:39.362797 140127424894784 deprecation.py:323] From /home/ec2-user/anaconda3/envs/tensorflow_p36/lib/python3.6/site-packages/tensorflow/python/ops/nn_impl.py:180: add_dispatch_support.<locals>.wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\nInstructions for updating:\nUse tf.where in 2.0, which has the same broadcast rule as np.where\n" ], [ "model.summary()", "Model: \"model\"\n__________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n==================================================================================================\ninput_4 (InputLayer) [(None, None, None, 0 \n__________________________________________________________________________________________________\nconv1_pad (ZeroPadding2D) (None, None, None, 3 0 input_4[0][0] \n__________________________________________________________________________________________________\nconv1 (Conv2D) (None, None, None, 6 9472 conv1_pad[0][0] \n__________________________________________________________________________________________________\nbn_conv1 (BatchNormalization) (None, None, None, 6 256 conv1[0][0] \n__________________________________________________________________________________________________\nactivation_147 
(Activation) (None, None, None, 6 0 bn_conv1[0][0] \n__________________________________________________________________________________________________\npool1_pad (ZeroPadding2D) (None, None, None, 6 0 activation_147[0][0] \n__________________________________________________________________________________________________\nmax_pooling2d_3 (MaxPooling2D) (None, None, None, 6 0 pool1_pad[0][0] \n__________________________________________________________________________________________________\nres2a_branch2a (Conv2D) (None, None, None, 6 4160 max_pooling2d_3[0][0] \n__________________________________________________________________________________________________\nbn2a_branch2a (BatchNormalizati (None, None, None, 6 256 res2a_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_148 (Activation) (None, None, None, 6 0 bn2a_branch2a[0][0] \n__________________________________________________________________________________________________\nres2a_branch2b (Conv2D) (None, None, None, 6 36928 activation_148[0][0] \n__________________________________________________________________________________________________\nbn2a_branch2b (BatchNormalizati (None, None, None, 6 256 res2a_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_149 (Activation) (None, None, None, 6 0 bn2a_branch2b[0][0] \n__________________________________________________________________________________________________\nres2a_branch2c (Conv2D) (None, None, None, 2 16640 activation_149[0][0] \n__________________________________________________________________________________________________\nres2a_branch1 (Conv2D) (None, None, None, 2 16640 max_pooling2d_3[0][0] \n__________________________________________________________________________________________________\nbn2a_branch2c (BatchNormalizati (None, None, None, 2 1024 res2a_branch2c[0][0] 
\n__________________________________________________________________________________________________\nbn2a_branch1 (BatchNormalizatio (None, None, None, 2 1024 res2a_branch1[0][0] \n__________________________________________________________________________________________________\nadd_48 (Add) (None, None, None, 2 0 bn2a_branch2c[0][0] \n bn2a_branch1[0][0] \n__________________________________________________________________________________________________\nactivation_150 (Activation) (None, None, None, 2 0 add_48[0][0] \n__________________________________________________________________________________________________\nres2b_branch2a (Conv2D) (None, None, None, 6 16448 activation_150[0][0] \n__________________________________________________________________________________________________\nbn2b_branch2a (BatchNormalizati (None, None, None, 6 256 res2b_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_151 (Activation) (None, None, None, 6 0 bn2b_branch2a[0][0] \n__________________________________________________________________________________________________\nres2b_branch2b (Conv2D) (None, None, None, 6 36928 activation_151[0][0] \n__________________________________________________________________________________________________\nbn2b_branch2b (BatchNormalizati (None, None, None, 6 256 res2b_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_152 (Activation) (None, None, None, 6 0 bn2b_branch2b[0][0] \n__________________________________________________________________________________________________\nres2b_branch2c (Conv2D) (None, None, None, 2 16640 activation_152[0][0] \n__________________________________________________________________________________________________\nbn2b_branch2c (BatchNormalizati (None, None, None, 2 1024 res2b_branch2c[0][0] 
\n__________________________________________________________________________________________________\nadd_49 (Add) (None, None, None, 2 0 bn2b_branch2c[0][0] \n activation_150[0][0] \n__________________________________________________________________________________________________\nactivation_153 (Activation) (None, None, None, 2 0 add_49[0][0] \n__________________________________________________________________________________________________\nres2c_branch2a (Conv2D) (None, None, None, 6 16448 activation_153[0][0] \n__________________________________________________________________________________________________\nbn2c_branch2a (BatchNormalizati (None, None, None, 6 256 res2c_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_154 (Activation) (None, None, None, 6 0 bn2c_branch2a[0][0] \n__________________________________________________________________________________________________\nres2c_branch2b (Conv2D) (None, None, None, 6 36928 activation_154[0][0] \n__________________________________________________________________________________________________\nbn2c_branch2b (BatchNormalizati (None, None, None, 6 256 res2c_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_155 (Activation) (None, None, None, 6 0 bn2c_branch2b[0][0] \n__________________________________________________________________________________________________\nres2c_branch2c (Conv2D) (None, None, None, 2 16640 activation_155[0][0] \n__________________________________________________________________________________________________\nbn2c_branch2c (BatchNormalizati (None, None, None, 2 1024 res2c_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_50 (Add) (None, None, None, 2 0 bn2c_branch2c[0][0] \n activation_153[0][0] 
\n__________________________________________________________________________________________________\nactivation_156 (Activation) (None, None, None, 2 0 add_50[0][0] \n__________________________________________________________________________________________________\nres3a_branch2a (Conv2D) (None, None, None, 1 32896 activation_156[0][0] \n__________________________________________________________________________________________________\nbn3a_branch2a (BatchNormalizati (None, None, None, 1 512 res3a_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_157 (Activation) (None, None, None, 1 0 bn3a_branch2a[0][0] \n__________________________________________________________________________________________________\nres3a_branch2b (Conv2D) (None, None, None, 1 147584 activation_157[0][0] \n__________________________________________________________________________________________________\nbn3a_branch2b (BatchNormalizati (None, None, None, 1 512 res3a_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_158 (Activation) (None, None, None, 1 0 bn3a_branch2b[0][0] \n__________________________________________________________________________________________________\nres3a_branch2c (Conv2D) (None, None, None, 5 66048 activation_158[0][0] \n__________________________________________________________________________________________________\nres3a_branch1 (Conv2D) (None, None, None, 5 131584 activation_156[0][0] \n__________________________________________________________________________________________________\nbn3a_branch2c (BatchNormalizati (None, None, None, 5 2048 res3a_branch2c[0][0] \n__________________________________________________________________________________________________\nbn3a_branch1 (BatchNormalizatio (None, None, None, 5 2048 res3a_branch1[0][0] 
\n__________________________________________________________________________________________________\nadd_51 (Add) (None, None, None, 5 0 bn3a_branch2c[0][0] \n bn3a_branch1[0][0] \n__________________________________________________________________________________________________\nactivation_159 (Activation) (None, None, None, 5 0 add_51[0][0] \n__________________________________________________________________________________________________\nres3b_branch2a (Conv2D) (None, None, None, 1 65664 activation_159[0][0] \n__________________________________________________________________________________________________\nbn3b_branch2a (BatchNormalizati (None, None, None, 1 512 res3b_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_160 (Activation) (None, None, None, 1 0 bn3b_branch2a[0][0] \n__________________________________________________________________________________________________\nres3b_branch2b (Conv2D) (None, None, None, 1 147584 activation_160[0][0] \n__________________________________________________________________________________________________\nbn3b_branch2b (BatchNormalizati (None, None, None, 1 512 res3b_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_161 (Activation) (None, None, None, 1 0 bn3b_branch2b[0][0] \n__________________________________________________________________________________________________\nres3b_branch2c (Conv2D) (None, None, None, 5 66048 activation_161[0][0] \n__________________________________________________________________________________________________\nbn3b_branch2c (BatchNormalizati (None, None, None, 5 2048 res3b_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_52 (Add) (None, None, None, 5 0 bn3b_branch2c[0][0] \n activation_159[0][0] 
\n__________________________________________________________________________________________________\nactivation_162 (Activation) (None, None, None, 5 0 add_52[0][0] \n__________________________________________________________________________________________________\nres3c_branch2a (Conv2D) (None, None, None, 1 65664 activation_162[0][0] \n__________________________________________________________________________________________________\nbn3c_branch2a (BatchNormalizati (None, None, None, 1 512 res3c_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_163 (Activation) (None, None, None, 1 0 bn3c_branch2a[0][0] \n__________________________________________________________________________________________________\nres3c_branch2b (Conv2D) (None, None, None, 1 147584 activation_163[0][0] \n__________________________________________________________________________________________________\nbn3c_branch2b (BatchNormalizati (None, None, None, 1 512 res3c_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_164 (Activation) (None, None, None, 1 0 bn3c_branch2b[0][0] \n__________________________________________________________________________________________________\nres3c_branch2c (Conv2D) (None, None, None, 5 66048 activation_164[0][0] \n__________________________________________________________________________________________________\nbn3c_branch2c (BatchNormalizati (None, None, None, 5 2048 res3c_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_53 (Add) (None, None, None, 5 0 bn3c_branch2c[0][0] \n activation_162[0][0] \n__________________________________________________________________________________________________\nactivation_165 (Activation) (None, None, None, 5 0 add_53[0][0] 
\n__________________________________________________________________________________________________\nres3d_branch2a (Conv2D) (None, None, None, 1 65664 activation_165[0][0] \n__________________________________________________________________________________________________\nbn3d_branch2a (BatchNormalizati (None, None, None, 1 512 res3d_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_166 (Activation) (None, None, None, 1 0 bn3d_branch2a[0][0] \n__________________________________________________________________________________________________\nres3d_branch2b (Conv2D) (None, None, None, 1 147584 activation_166[0][0] \n__________________________________________________________________________________________________\nbn3d_branch2b (BatchNormalizati (None, None, None, 1 512 res3d_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_167 (Activation) (None, None, None, 1 0 bn3d_branch2b[0][0] \n__________________________________________________________________________________________________\nres3d_branch2c (Conv2D) (None, None, None, 5 66048 activation_167[0][0] \n__________________________________________________________________________________________________\nbn3d_branch2c (BatchNormalizati (None, None, None, 5 2048 res3d_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_54 (Add) (None, None, None, 5 0 bn3d_branch2c[0][0] \n activation_165[0][0] \n__________________________________________________________________________________________________\nactivation_168 (Activation) (None, None, None, 5 0 add_54[0][0] \n__________________________________________________________________________________________________\nres4a_branch2a (Conv2D) (None, None, None, 2 131328 activation_168[0][0] 
\n__________________________________________________________________________________________________\nbn4a_branch2a (BatchNormalizati (None, None, None, 2 1024 res4a_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_169 (Activation) (None, None, None, 2 0 bn4a_branch2a[0][0] \n__________________________________________________________________________________________________\nres4a_branch2b (Conv2D) (None, None, None, 2 590080 activation_169[0][0] \n__________________________________________________________________________________________________\nbn4a_branch2b (BatchNormalizati (None, None, None, 2 1024 res4a_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_170 (Activation) (None, None, None, 2 0 bn4a_branch2b[0][0] \n__________________________________________________________________________________________________\nres4a_branch2c (Conv2D) (None, None, None, 1 263168 activation_170[0][0] \n__________________________________________________________________________________________________\nres4a_branch1 (Conv2D) (None, None, None, 1 525312 activation_168[0][0] \n__________________________________________________________________________________________________\nbn4a_branch2c (BatchNormalizati (None, None, None, 1 4096 res4a_branch2c[0][0] \n__________________________________________________________________________________________________\nbn4a_branch1 (BatchNormalizatio (None, None, None, 1 4096 res4a_branch1[0][0] \n__________________________________________________________________________________________________\nadd_55 (Add) (None, None, None, 1 0 bn4a_branch2c[0][0] \n bn4a_branch1[0][0] \n__________________________________________________________________________________________________\nactivation_171 (Activation) (None, None, None, 1 0 add_55[0][0] 
\n__________________________________________________________________________________________________\nres4b_branch2a (Conv2D) (None, None, None, 2 262400 activation_171[0][0] \n__________________________________________________________________________________________________\nbn4b_branch2a (BatchNormalizati (None, None, None, 2 1024 res4b_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_172 (Activation) (None, None, None, 2 0 bn4b_branch2a[0][0] \n__________________________________________________________________________________________________\nres4b_branch2b (Conv2D) (None, None, None, 2 590080 activation_172[0][0] \n__________________________________________________________________________________________________\nbn4b_branch2b (BatchNormalizati (None, None, None, 2 1024 res4b_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_173 (Activation) (None, None, None, 2 0 bn4b_branch2b[0][0] \n__________________________________________________________________________________________________\nres4b_branch2c (Conv2D) (None, None, None, 1 263168 activation_173[0][0] \n__________________________________________________________________________________________________\nbn4b_branch2c (BatchNormalizati (None, None, None, 1 4096 res4b_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_56 (Add) (None, None, None, 1 0 bn4b_branch2c[0][0] \n activation_171[0][0] \n__________________________________________________________________________________________________\nactivation_174 (Activation) (None, None, None, 1 0 add_56[0][0] \n__________________________________________________________________________________________________\nres4c_branch2a (Conv2D) (None, None, None, 2 262400 activation_174[0][0] 
\n__________________________________________________________________________________________________\nbn4c_branch2a (BatchNormalizati (None, None, None, 2 1024 res4c_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_175 (Activation) (None, None, None, 2 0 bn4c_branch2a[0][0] \n__________________________________________________________________________________________________\nres4c_branch2b (Conv2D) (None, None, None, 2 590080 activation_175[0][0] \n__________________________________________________________________________________________________\nbn4c_branch2b (BatchNormalizati (None, None, None, 2 1024 res4c_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_176 (Activation) (None, None, None, 2 0 bn4c_branch2b[0][0] \n__________________________________________________________________________________________________\nres4c_branch2c (Conv2D) (None, None, None, 1 263168 activation_176[0][0] \n__________________________________________________________________________________________________\nbn4c_branch2c (BatchNormalizati (None, None, None, 1 4096 res4c_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_57 (Add) (None, None, None, 1 0 bn4c_branch2c[0][0] \n activation_174[0][0] \n__________________________________________________________________________________________________\nactivation_177 (Activation) (None, None, None, 1 0 add_57[0][0] \n__________________________________________________________________________________________________\nres4d_branch2a (Conv2D) (None, None, None, 2 262400 activation_177[0][0] \n__________________________________________________________________________________________________\nbn4d_branch2a (BatchNormalizati (None, None, None, 2 1024 res4d_branch2a[0][0] 
\n__________________________________________________________________________________________________\nactivation_178 (Activation) (None, None, None, 2 0 bn4d_branch2a[0][0] \n__________________________________________________________________________________________________\nres4d_branch2b (Conv2D) (None, None, None, 2 590080 activation_178[0][0] \n__________________________________________________________________________________________________\nbn4d_branch2b (BatchNormalizati (None, None, None, 2 1024 res4d_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_179 (Activation) (None, None, None, 2 0 bn4d_branch2b[0][0] \n__________________________________________________________________________________________________\nres4d_branch2c (Conv2D) (None, None, None, 1 263168 activation_179[0][0] \n__________________________________________________________________________________________________\nbn4d_branch2c (BatchNormalizati (None, None, None, 1 4096 res4d_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_58 (Add) (None, None, None, 1 0 bn4d_branch2c[0][0] \n activation_177[0][0] \n__________________________________________________________________________________________________\nactivation_180 (Activation) (None, None, None, 1 0 add_58[0][0] \n__________________________________________________________________________________________________\nres4e_branch2a (Conv2D) (None, None, None, 2 262400 activation_180[0][0] \n__________________________________________________________________________________________________\nbn4e_branch2a (BatchNormalizati (None, None, None, 2 1024 res4e_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_181 (Activation) (None, None, None, 2 0 bn4e_branch2a[0][0] 
\n__________________________________________________________________________________________________\nres4e_branch2b (Conv2D) (None, None, None, 2 590080 activation_181[0][0] \n__________________________________________________________________________________________________\nbn4e_branch2b (BatchNormalizati (None, None, None, 2 1024 res4e_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_182 (Activation) (None, None, None, 2 0 bn4e_branch2b[0][0] \n__________________________________________________________________________________________________\nres4e_branch2c (Conv2D) (None, None, None, 1 263168 activation_182[0][0] \n__________________________________________________________________________________________________\nbn4e_branch2c (BatchNormalizati (None, None, None, 1 4096 res4e_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_59 (Add) (None, None, None, 1 0 bn4e_branch2c[0][0] \n activation_180[0][0] \n__________________________________________________________________________________________________\nactivation_183 (Activation) (None, None, None, 1 0 add_59[0][0] \n__________________________________________________________________________________________________\nres4f_branch2a (Conv2D) (None, None, None, 2 262400 activation_183[0][0] \n__________________________________________________________________________________________________\nbn4f_branch2a (BatchNormalizati (None, None, None, 2 1024 res4f_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_184 (Activation) (None, None, None, 2 0 bn4f_branch2a[0][0] \n__________________________________________________________________________________________________\nres4f_branch2b (Conv2D) (None, None, None, 2 590080 activation_184[0][0] 
\n__________________________________________________________________________________________________\nbn4f_branch2b (BatchNormalizati (None, None, None, 2 1024 res4f_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_185 (Activation) (None, None, None, 2 0 bn4f_branch2b[0][0] \n__________________________________________________________________________________________________\nres4f_branch2c (Conv2D) (None, None, None, 1 263168 activation_185[0][0] \n__________________________________________________________________________________________________\nbn4f_branch2c (BatchNormalizati (None, None, None, 1 4096 res4f_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_60 (Add) (None, None, None, 1 0 bn4f_branch2c[0][0] \n activation_183[0][0] \n__________________________________________________________________________________________________\nactivation_186 (Activation) (None, None, None, 1 0 add_60[0][0] \n__________________________________________________________________________________________________\nres5a_branch2a (Conv2D) (None, None, None, 5 524800 activation_186[0][0] \n__________________________________________________________________________________________________\nbn5a_branch2a (BatchNormalizati (None, None, None, 5 2048 res5a_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_187 (Activation) (None, None, None, 5 0 bn5a_branch2a[0][0] \n__________________________________________________________________________________________________\nres5a_branch2b (Conv2D) (None, None, None, 5 2359808 activation_187[0][0] \n__________________________________________________________________________________________________\nbn5a_branch2b (BatchNormalizati (None, None, None, 5 2048 res5a_branch2b[0][0] 
\n__________________________________________________________________________________________________\nactivation_188 (Activation) (None, None, None, 5 0 bn5a_branch2b[0][0] \n__________________________________________________________________________________________________\nres5a_branch2c (Conv2D) (None, None, None, 2 1050624 activation_188[0][0] \n__________________________________________________________________________________________________\nres5a_branch1 (Conv2D) (None, None, None, 2 2099200 activation_186[0][0] \n__________________________________________________________________________________________________\nbn5a_branch2c (BatchNormalizati (None, None, None, 2 8192 res5a_branch2c[0][0] \n__________________________________________________________________________________________________\nbn5a_branch1 (BatchNormalizatio (None, None, None, 2 8192 res5a_branch1[0][0] \n__________________________________________________________________________________________________\nadd_61 (Add) (None, None, None, 2 0 bn5a_branch2c[0][0] \n bn5a_branch1[0][0] \n__________________________________________________________________________________________________\nactivation_189 (Activation) (None, None, None, 2 0 add_61[0][0] \n__________________________________________________________________________________________________\nres5b_branch2a (Conv2D) (None, None, None, 5 1049088 activation_189[0][0] \n__________________________________________________________________________________________________\nbn5b_branch2a (BatchNormalizati (None, None, None, 5 2048 res5b_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_190 (Activation) (None, None, None, 5 0 bn5b_branch2a[0][0] \n__________________________________________________________________________________________________\nres5b_branch2b (Conv2D) (None, None, None, 5 2359808 activation_190[0][0] 
\n__________________________________________________________________________________________________\nbn5b_branch2b (BatchNormalizati (None, None, None, 5 2048 res5b_branch2b[0][0] \n__________________________________________________________________________________________________\nactivation_191 (Activation) (None, None, None, 5 0 bn5b_branch2b[0][0] \n__________________________________________________________________________________________________\nres5b_branch2c (Conv2D) (None, None, None, 2 1050624 activation_191[0][0] \n__________________________________________________________________________________________________\nbn5b_branch2c (BatchNormalizati (None, None, None, 2 8192 res5b_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_62 (Add) (None, None, None, 2 0 bn5b_branch2c[0][0] \n activation_189[0][0] \n__________________________________________________________________________________________________\nactivation_192 (Activation) (None, None, None, 2 0 add_62[0][0] \n__________________________________________________________________________________________________\nres5c_branch2a (Conv2D) (None, None, None, 5 1049088 activation_192[0][0] \n__________________________________________________________________________________________________\nbn5c_branch2a (BatchNormalizati (None, None, None, 5 2048 res5c_branch2a[0][0] \n__________________________________________________________________________________________________\nactivation_193 (Activation) (None, None, None, 5 0 bn5c_branch2a[0][0] \n__________________________________________________________________________________________________\nres5c_branch2b (Conv2D) (None, None, None, 5 2359808 activation_193[0][0] \n__________________________________________________________________________________________________\nbn5c_branch2b (BatchNormalizati (None, None, None, 5 2048 res5c_branch2b[0][0] 
\n__________________________________________________________________________________________________\nactivation_194 (Activation) (None, None, None, 5 0 bn5c_branch2b[0][0] \n__________________________________________________________________________________________________\nres5c_branch2c (Conv2D) (None, None, None, 2 1050624 activation_194[0][0] \n__________________________________________________________________________________________________\nbn5c_branch2c (BatchNormalizati (None, None, None, 2 8192 res5c_branch2c[0][0] \n__________________________________________________________________________________________________\nadd_63 (Add) (None, None, None, 2 0 bn5c_branch2c[0][0] \n activation_192[0][0] \n__________________________________________________________________________________________________\nactivation_195 (Activation) (None, None, None, 2 0 add_63[0][0] \n__________________________________________________________________________________________________\nglobal_average_pooling2d_1 (Glo (None, 2048) 0 activation_195[0][0] \n__________________________________________________________________________________________________\ndense_2 (Dense) (None, 1024) 2098176 global_average_pooling2d_1[0][0] \n__________________________________________________________________________________________________\ndense_3 (Dense) (None, 1) 1025 dense_2[0][0] \n==================================================================================================\nTotal params: 25,686,913\nTrainable params: 2,099,201\nNon-trainable params: 23,587,712\n__________________________________________________________________________________________________\n" ], [ "model.fit(X_train, y_train,\n batch_size=20,\n epochs=15,\n validation_data=(X_test, y_test))", "Train on 561 samples, validate on 141 samples\nEpoch 1/15\n561/561 [==============================] - 85s 151ms/sample - loss: 0.2482 - acc: 0.9091 - val_loss: 0.1766 - val_acc: 0.9362\nEpoch 2/15\n561/561 
[==============================] - 77s 137ms/sample - loss: 0.2083 - acc: 0.9430 - val_loss: 0.0734 - val_acc: 0.9787\nEpoch 3/15\n561/561 [==============================] - 77s 138ms/sample - loss: 0.0277 - acc: 0.9840 - val_loss: 0.0836 - val_acc: 0.9787\nEpoch 4/15\n561/561 [==============================] - 79s 141ms/sample - loss: 0.2426 - acc: 0.9572 - val_loss: 0.0038 - val_acc: 1.0000\nEpoch 5/15\n561/561 [==============================] - 77s 137ms/sample - loss: 0.0379 - acc: 0.9840 - val_loss: 0.0286 - val_acc: 0.9858\nEpoch 6/15\n561/561 [==============================] - 78s 140ms/sample - loss: 0.0430 - acc: 0.9857 - val_loss: 0.0040 - val_acc: 1.0000\nEpoch 7/15\n561/561 [==============================] - 77s 137ms/sample - loss: 0.0792 - acc: 0.9768 - val_loss: 0.1613 - val_acc: 0.9362\nEpoch 8/15\n561/561 [==============================] - 78s 140ms/sample - loss: 0.0507 - acc: 0.9822 - val_loss: 0.0091 - val_acc: 1.0000\nEpoch 9/15\n561/561 [==============================] - 76s 136ms/sample - loss: 0.0055 - acc: 0.9982 - val_loss: 0.0031 - val_acc: 1.0000\nEpoch 10/15\n561/561 [==============================] - 168s 300ms/sample - loss: 0.1123 - acc: 0.9643 - val_loss: 0.0028 - val_acc: 1.0000\nEpoch 11/15\n561/561 [==============================] - 297s 530ms/sample - loss: 0.1797 - acc: 0.9519 - val_loss: 0.3173 - val_acc: 0.9078\nEpoch 12/15\n561/561 [==============================] - 295s 526ms/sample - loss: 0.3040 - acc: 0.9412 - val_loss: 0.1008 - val_acc: 0.9716\nEpoch 13/15\n561/561 [==============================] - 297s 529ms/sample - loss: 0.1839 - acc: 0.9661 - val_loss: 0.0943 - val_acc: 0.9716\nEpoch 14/15\n561/561 [==============================] - 297s 530ms/sample - loss: 0.0920 - acc: 0.9822 - val_loss: 0.0416 - val_acc: 0.9787\nEpoch 15/15\n561/561 [==============================] - 296s 528ms/sample - loss: 0.5647 - acc: 0.9144 - val_loss: 0.2513 - val_acc: 0.9574\n" ] ], [ [ "# Resources and Stretch Goals\n\nStretch goals\n- 
Enhance your code to use classes/functions and accept terms to search and classes to look for in recognizing the downloaded images (e.g. download images of parties, recognize all that contain balloons)\n- Check out [other available pretrained networks](https://tfhub.dev), try some and compare\n- Image recognition/classification is somewhat solved, but *relationships* between entities and describing an image is not - check out some of the extended resources (e.g. [Visual Genome](https://visualgenome.org/)) on the topic\n- Transfer learning - using images you source yourself, [retrain a classifier](https://www.tensorflow.org/hub/tutorials/image_retraining) with a new category\n- (Not CNN related) Use [piexif](https://pypi.org/project/piexif/) to check out the metadata of images passed in to your system - see if they're from a national park! (Note - many images lack GPS metadata, so this won't work in most cases, but still cool)\n\nResources\n- [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) - influential paper (introduced ResNet)\n- [YOLO: Real-Time Object Detection](https://pjreddie.com/darknet/yolo/) - an influential convolution based object detection system, focused on inference speed (for applications to e.g. self driving vehicles)\n- [R-CNN, Fast R-CNN, Faster R-CNN, YOLO](https://towardsdatascience.com/r-cnn-fast-r-cnn-faster-r-cnn-yolo-object-detection-algorithms-36d53571365e) - comparison of object detection systems\n- [Common Objects in Context](http://cocodataset.org/) - a large-scale object detection, segmentation, and captioning dataset\n- [Visual Genome](https://visualgenome.org/) - a dataset, a knowledge base, an ongoing effort to connect structured image concepts to language", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
4aa46465523dc3546a4024721ce2f3c5a7415684
9,453
ipynb
Jupyter Notebook
assignments/A10/A10_Q2.ipynb
eds-uga/csci1360e-su18
78c7b4c2f31127232412524e58d8da76118987ab
[ "MIT" ]
null
null
null
assignments/A10/A10_Q2.ipynb
eds-uga/csci1360e-su18
78c7b4c2f31127232412524e58d8da76118987ab
[ "MIT" ]
null
null
null
assignments/A10/A10_Q2.ipynb
eds-uga/csci1360e-su18
78c7b4c2f31127232412524e58d8da76118987ab
[ "MIT" ]
null
null
null
29.086154
415
0.589019
[ [ [ "# Question 2\n\nYou're an aspiring computational biologist, working with some alveolar (lung) cells to study some of the cellular machinery involved in disease progression. You've tagged the proteins you're interested in, run your experiment, and collected your data from the confocal microscope in your advisor's lab.\n\nUnfortunately, someone didn't properly secure the confocal microscope, because some dust or something got shaken loose during your imaging slot and it seems to have corrupted your images!\n\n<img src=\"noisy.png\" width=\"60%\" />\n\nYou don't have enough time to completely re-do the experiments, so you'll need to use your computational skills to clean up the data post-acquisition.", "_____no_output_____" ], [ "### Part A\n\nThe `scipy.ndimage` submodule has lots of \"filters\" you can use to process your images. In the lecture we saw how the Gaussian filter worked for smoothing; we'll use that again here, in addition to a median filter.\n\nThe functions you'll want to use are [**`ndimage.gaussian_filter`**](http://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.gaussian_filter.html#scipy.ndimage.gaussian_filter) and [**`ndimage.median_filter`**](http://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.median_filter.html#scipy.ndimage.median_filter). Check out their respective documentation pages to see how to use them.\n\nThe upshot is both functions have 2 required arguments: the first is the image (of course), and the second is an integer that indicates the filter size; for the Gaussian filter, this argument is **`sigma`**; for the median filter, this argument is **`size`**.\n\n**Experiment with both filters, and with a few filter sizes. Plot the results of your filters using `plt.imshow()`, which has already been imported for you.** Make sure you post the results! 
Create new cells if you need to, but please try to show multiple plots of your different \"experiments\" (different filters with different parameter values, and the resulting images).", "_____no_output_____" ] ], [ [ "%matplotlib inline\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport scipy.ndimage as ndimage\n\nimg = ndimage.imread(\"noisy.png\", flatten = True)\n\n### BEGIN SOLUTION\n\n### END SOLUTION", "_____no_output_____" ] ], [ [ "### Part B\n\nCompare and contrast the two types of filters (Gaussian and median). Are there similarities between their effects? Are there differences? How do the filter sizes affect the outputs? Can you speculate as to how these filters work under-the-hood?", "_____no_output_____" ], [ "### Part C\n\nUse your function from Question 1, Part B to count the number of cells in this image.\n\nWrite a function which:\n\n - is named `count_cells`\n - takes 3 arguments: a NumPy image, an optional median filter size (default: 5), and an optional pixel threshold (default: 0)\n - returns 1 number: the number of cells found in the image\n \nIt's pretty much the same deal as Part B on the last question, except this time we're also performing a median filter on the image to try and get rid of some of the noise in the image.\n\nThe threshold procedure is also different. Before, you simply set any pixel below a certain value to 0. 
In this case, you're still doing that, but in addition **you will also set all *other* pixels to 1.** This is known as **binarization**: every pixel in the entire image is either a 1 or a 0.\n\nYou can use `scipy.ndimage`, `skimage`, `numpy`, and `matplotlib`, but no other built-in functions or imported Python packages.", "_____no_output_____" ] ], [ [ "import scipy.ndimage as ndimage\nimg = ndimage.imread(\"noisy.png\", flatten = True)\nt1 = 30\ns1 = 5\na1 = 33\nassert a1 == count_cells(img, s1, t1)", "_____no_output_____" ], [ "img = ndimage.imread(\"noisy.png\", flatten = True)\nt2 = 30\ns2 = 20\na2 = 21\nassert a2 == count_cells(img, s2, t2)", "_____no_output_____" ], [ "img = ndimage.imread(\"noisy.png\", flatten = True)\nt3 = 100\ns3 = 5\na3 = 97\nassert a3 == count_cells(img, s3, t3)", "_____no_output_____" ], [ "img = ndimage.imread(\"noisy.png\", flatten = True)\nt4 = 100\ns4 = 20\na4 = 21\nassert a4 == count_cells(img, s4, t4)", "_____no_output_____" ] ], [ [ "### Part D\n\nUsing the function you created in the previous question, re-run the cell counter, but this time on the *original* noisy image. Run it a few times, changing the pixel threshold you set (but using the original noisy image each time). How does the number of objects your function finds change with the pixel threshold?\n\nNow run it on a *filtered* image, but change the filter size. Make it really small and count the number of objects. Make it really large and count the number of objects. Keep the pixel threshold constant for this. How does the number of objects your function finds change with the filter size?\n\nPut your code in the box below, and write your responses in the box below that.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ] ]
4aa469d840a5d57577e17f42afbd0cea3d00d79f
21,789
ipynb
Jupyter Notebook
doc/tutorials/SIR-X.ipynb
1091478765p/open-sirv
7833d6fa131f743b319bce0b329479e1af18d4c0
[ "MIT" ]
6
2020-03-28T20:59:41.000Z
2021-04-24T08:09:15.000Z
doc/tutorials/SIR-X.ipynb
1091478765p/open-sirv
7833d6fa131f743b319bce0b329479e1af18d4c0
[ "MIT" ]
71
2020-03-29T15:10:27.000Z
2022-03-12T00:47:54.000Z
doc/tutorials/SIR-X.ipynb
1091478765p/open-sirv
7833d6fa131f743b319bce0b329479e1af18d4c0
[ "MIT" ]
8
2020-04-04T21:15:58.000Z
2021-04-29T15:34:37.000Z
33.470046
475
0.608472
[ [ [ "# SIR-X", "_____no_output_____" ], [ "This notebook exemplifies how Open-SIR can be used to fit the SIR-X model by [Maier and Dirk (2020)](https://science.sciencemag.org/content/early/2020/04/07/science.abb4557.full) to existing data and make predictions. The SIR-X model is a standard generalization of the Susceptible-Infectious-Removed (SIR) model, which includes the influence of exogenous factors such as policy changes, lockdown of the whole population and quarantine of the infectious individuals.\n\nThe Open-SIR implementation of the SIR-X model will be validated reproducing the parameter fitting published in the [supplementary material](https://science.sciencemag.org/cgi/content/full/science.abb4557/DC1) of the original article published by [Maier and Brockmann (2020)](https://science.sciencemag.org/content/early/2020/04/07/science.abb4557.full). For simplicity, the validation will be performed only for the city of Guangdong, China.", "_____no_output_____" ], [ "## Import modules", "_____no_output_____" ] ], [ [ "# Uncomment this cell to activate black code formatter in the notebook\n# %load_ext nb_black", "_____no_output_____" ], [ "# Import packages\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport numpy as np\n\n%matplotlib inline", "_____no_output_____" ] ], [ [ "## Data sourcing", "_____no_output_____" ], [ "We will source data from the repository of the [John Hopkins University COVID-19 dashboard] (https://coronavirus.jhu.edu/map.html) published formally as a correspondence in [The Lancet](https://www.thelancet.com/journals/laninf/article/PIIS1473-3099(20)30120-1/fulltext#seccestitle10). 
This time series data contains the number of reported cases $C(t)$ per day for a number of cities.\n\n", "_____no_output_____" ] ], [ [ "# Source data from the Johns Hopkins University repository\n# jhu_link = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/who_covid_19_situation_reports/who_covid_19_sit_rep_time_series/who_covid_19_sit_rep_time_series.csv\"\njhu_link = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_confirmed_global.csv\"\njhu_df = pd.read_csv(jhu_link)\n# Explore the dataset\njhu_df.head(10)", "_____no_output_____" ] ], [ [ "It is observed that the column \"Province/States\" contains the name of the cities, and since the fourth column a time series stamp (or index) is provided to record daily data of reported cases. Additionally, there are many days without recorded data for a number of Chinese cities. This won't be an issue for parameter fitting as Open-SIR doesn't require uniform spacing of the observed data.", "_____no_output_____" ], [ "### Data preparation\n\nIn the following lines, the time series for Guangdong reported cases $C(t)$ is extracted from the original dataframe. 
Thereafter, the columns are converted to a pandas date time index in order to perform further data preparation steps.", "_____no_output_____" ] ], [ [ "China = jhu_df[jhu_df[jhu_df.columns[1]] == \"China\"]\ncity_name = \"Guangdong\"\ncity = China[China[\"Province/State\"] == city_name]\ncity = city.drop(columns=[\"Province/State\", \"Country/Region\", \"Lat\", \"Long\"])\ntime_index = pd.to_datetime(city.columns)\ndata = city.values\n# Visualize the time\nts = pd.Series(data=city.values[0], index=time_index)", "_____no_output_____" ] ], [ [ "Using the function ts.plot() a quick visualization of the dataset is obtained:", "_____no_output_____" ] ], [ [ "ts.plot()\nplt.title(\"Guangdong COVID-19 cases\")\nplt.ylabel(\"$C(t)$: Number of reported cases\", size=12)\nplt.show()", "_____no_output_____" ] ], [ [ "Data cleaning", "_____no_output_____" ] ], [ [ "ts_clean = ts.dropna()\n# Extract data\nts_fit = ts_clean[\"2020-01-21\":\"2020-02-12\"]\n# Convert index to numeric\nts_num = pd.to_numeric(ts_fit.index)\nt0 = ts_num[0]\n# Convert datetime to days\nt_days = (ts_num - t0) / (10 ** 9 * 86400)\nt_days = t_days.astype(int).values\n# t_days is an input for SIR", "_____no_output_____" ], [ "# Define the X number\nnX = ts_fit.values # Number of infected\nN = 104.3e6 # Population size of Guangdong", "_____no_output_____" ] ], [ [ "Exploration of the dataset", "_____no_output_____" ] ], [ [ "ts_fit.plot(style=\"ro\")\nplt.xlabel(\"Number of infected\")\nplt.show()", "_____no_output_____" ] ], [ [ "### Setting up SIR and SIR-X models", "_____no_output_____" ], [ "The population $N$ of the city is a necessary input for the model. 
In this notebook, this was hardcoded, but it can be sourced directly from a web source.\n\nNote that whilst the SIR model estimates directly the number of infected people, $N I(t)$, SIR-X estimates the number of infected people based on the number of tested cases that are in quarantine or in a hospital $N X(t)$", "_____no_output_____" ] ], [ [ "# These lines are required only if opensir wasn't installed using pip install, or if opensir is running in the pipenv virtual environment\nimport sys\n\npath_opensir = \"../../\"\nsys.path.append(path_opensir)\n\n# Import SIR and SIRX models\nfrom opensir.models import SIR, SIRX\n\nnX = ts_fit.values # Number of observed infections of the time series\nN = 104.3e6 # Population size of Guangdong\nparams = [0.95, 0.38]\nw0 = (N - nX[0], nX[0], 0)\n\nG_sir = SIR()\nG_sir.set_params(p=params, initial_conds=w0)\nG_sir.fit_input = 2\nG_sir.fit(t_days, nX)\nG_sir.solve(t_days[-1], t_days[-1] + 1)\nt_SIR = G_sir.fetch()[:, 0]\nI_SIR = G_sir.fetch()[:, 2]", "_____no_output_____" ] ], [ [ "### Try to fit a SIR model to Guangdong data", "_____no_output_____" ] ], [ [ "ax = plt.axes()\nax.tick_params(axis=\"both\", which=\"major\", labelsize=14)\nplt.plot(t_SIR, I_SIR)\nplt.plot(t_days, nX, \"ro\")\nplt.show()", "_____no_output_____" ] ], [ [ "The SIR model is clearly not appropriate to fit this data, as it cannot resolve the effect of exogenous containment efforts such as quarantines or lockdown. 
We will repeat the process with a SIR-X model.", "_____no_output_____" ], [ "### Fit SIR-X to Guangdong Data", "_____no_output_____" ] ], [ [ "g_sirx = SIRX()\nparams = [6.2 / 8, 1 / 8, 0.05, 0.05, 5]\n# X_0 can be directly ontained from the statistics\nn_x0 = nX[0] # Number of people tested positive\nn_i0 = nX[0]\n\nw0 = (N - n_x0 - n_i0, n_i0, 0, n_x0)\ng_sirx.set_params(p=params, initial_conds=w0)\n# Fit all parameters\nfit_index = [False, False, True, True, True]\ng_sirx.fit(t_days, nX, fit_index=fit_index)\ng_sirx.solve(t_days[-1], t_days[-1] + 1)\nt_sirx = g_sirx.fetch()[:, 0]\ninf_sirx = g_sirx.fetch()[:, 4]", "_____no_output_____" ], [ "plt.figure(figsize=[6, 6])\nax = plt.axes()\nplt.plot(t_sirx, inf_sirx, \"b-\", linewidth=2)\nplt.plot(t_SIR, I_SIR, \"g-\", linewidth=2)\nplt.plot(t_days, nX, \"ro\")\nplt.legend(\n [\"SIR-X model fit\", \"SIR model fit\", \"Number of reported cases\"], fontsize=13\n)\nplt.title(\"SARS-CoV-2 evolution in Guangdong, China\", size=15)\nplt.xlabel(\"Days\", fontsize=14)\nplt.ylabel(\"COVID-19 confirmed cases\", fontsize=14)\nax.tick_params(axis=\"both\", which=\"major\", labelsize=14)\nplt.show()", "_____no_output_____" ] ], [ [ "After fitting the parameters, the effective infectious period $T_{I,eff}$ and the effective reproduction rate $R_{0,eff}$ can be obtained from the model properties\n\n$$T_{I,eff} = (\\beta + \\kappa + \\kappa_0)^{-1}$$\n$$R_{0,eff} = \\alpha T_{I,eff}$$\n\nAditionally, the Public containment leverage $P$ and the quarantine probability $Q$ can be calculated through:\n\n$$P = \\frac{\\kappa_0}{\\kappa_0 + \\kappa}$$\n$$Q = \\frac{\\kappa_0 + \\kappa}{\\beta + \\kappa_0 + \\kappa}$$", "_____no_output_____" ] ], [ [ "print(\"Effective infectious period T_I_eff = %.2f days \" % g_sirx.t_inf_eff)\nprint(\n \"Effective reproduction rate R_0_eff = %.2f, Maier and Brockmann = %.2f\"\n % (g_sirx.r0_eff, 3.02)\n)\nprint(\n \"Public containment leverage = %.2f, Maier and Brockmann = %.2f\"\n % (g_sirx.pcl, 
0.75)\n)\nprint(\n \"Quarantine probability = %.2f, Maier and Brockmann = %.2f\" % (g_sirx.q_prob, 0.51)\n)", "_____no_output_____" ] ], [ [ "### Make predictions using `model.predict`", "_____no_output_____" ] ], [ [ "# Make predictions and visualize\n# Obtain the results 14 days after the train data ends\nsirx_pred = g_sirx.predict(14)\nprint(\"T n_S \\t n_I \\tn_R \\tn_X\")\nfor i in sirx_pred:\n print(*i.astype(int))", "_____no_output_____" ] ], [ [ "Prepare date time index to plot predictions", "_____no_output_____" ] ], [ [ "# Import datetime module from the standard library\nimport datetime\n\n# Obtain the last day from the data used to train the model\nlast_time = ts_fit.index[-1]\n# Create a date time range based on the number of rows of the prediction\nnumdays = sirx_pred.shape[0]\nday_zero = datetime.datetime(last_time.year, last_time.month, last_time.day)\ndate_list = [day_zero + datetime.timedelta(days=x) for x in range(numdays)]", "_____no_output_____" ] ], [ [ "Plot predictions", "_____no_output_____" ] ], [ [ "# Extract figure and axes\nfig, ax = plt.subplots(figsize=[5, 5])\n# Create core plot attributes\nplt.plot(date_list, sirx_pred[:, 4], color=\"blue\", linewidth=2)\nplt.title(\"Prediction of Guangdong Cases\", size=14)\nplt.ylabel(\"Number of infected\", size=14)\n# Remove trailing space\nplt.xlim(date_list[0], date_list[-1])\n# Limit the amount of data displayed\nax.xaxis.set_major_locator(plt.MaxNLocator(3))\n# Increase the size of the ticks\nax.tick_params(labelsize=12)\nplt.show()", "_____no_output_____" ] ], [ [ "### Calculation of predictive confidence intervals\n\nThe confidence intervals on the predictions of the SIR-X model can be calculated using a block cross validation. This technique is widely used in Time Series Analysis. 
In the open-sir API, the function `model.ci_block_cv` calculates the average mean squared error of the predictions, a list of the rolling mean squared errors and the list of parameters which shows how much each parameter changes taking different number of days for making predictions.\n\nThe three first parameters are the same as the fit function, while the last two parameters are the `lags` and the `min_sample`. The `lags` parameter indicates how many periods in the future will be forecasted in order to calculate the mean squared error of the model prediction. The `min_sample` parameter indicates the initial number of observations and days that will be taken to perform the block cross validation.\n\nIn the following example, `model.ci_block_cv` is used to estimate the average mean squared error of *1-day* predictions taking *6* observations as the starting point of the cross validation. For Guangdong, a `min_sample=6` higher than the default 3 is required to handle well the missing data. This way, both the data on the four first days, and two days after the data starts again, are considered for cross validation.", "_____no_output_____" ] ], [ [ "# Calculate confidence intervals\nmse_avg, mse_list, p_list, pred_data = g_sirx.block_cv(lags=1, min_sample=6)", "_____no_output_____" ] ], [ [ "If it is assumed that the residuals distribute normally, then a good estimation of a 95% confidence interval on the one-day prediction of the number of confirmed cases is \n\n$$\\sigma \\sim \\mathrm{MSE} \\rightarrow n_{X,{t+1}} \\sim \\hat{n}_{X,{t+1}} \\pm 2 \\sigma$$ \n\nWhere $n_{X,{t+1}}$ is the real number of confirmed cases in the next day, and $\\hat{n}_{X,{t+1}}$ is the estimation using the SIR-X model using cross validation. 
We can use the `PredictionResults` instance `pred_data` functionality to explore the mean-squared errors and the predictions confidence intervals:", "_____no_output_____" ] ], [ [ "pred_data.print_mse()", "_____no_output_____" ] ], [ [ "The predictive accuracy of the model is quite impressive, even for 9-day predictions. Let's take advantage of the relatively low mean squared error to forecast a 10 days horizon with confidence intervals using `pred_data.plot_predictions(n_days=9)`", "_____no_output_____" ] ], [ [ "pred_data.plot_pred_ci(n_days=9)", "_____no_output_____" ] ], [ [ "If it is assumed that the residuals distribute normally, then a good estimation of a 95% confidence interval on the one-day prediction of the number of confirmed cases is \n\n$$\\sigma \\sim \\mathrm{MSE} \\rightarrow n_{X,{t+1}} \\sim \\hat{n}_{X,{t+1}} \\pm 2 \\sigma$$ \n\nWhere $n_{X,{t+1}}$ is the real number of confirmed cases in the next day, and $\\hat{n}_{X,{t+1}}$ is the estimation using the SIR-X model using cross validation. We use solve to make a 1-day prediction and append the 95% confidence interval.", "_____no_output_____" ] ], [ [ "# Predict\ng_sirx.solve(t_days[-1] + 1, t_days[-1] + 2)\nn_X_tplusone = g_sirx.fetch()[-1, 4]\nprint(\"Estimation of n_X_{t+1} = %.0f +- %.0f \" % (n_X_tplusone, 2 * mse_avg[0]))", "_____no_output_____" ], [ "# Transform parameter list into a DataFrame\npar_block_cv = pd.DataFrame(p_list)\n# Rename dataframe columns based on SIR-X parameter names\npar_block_cv.columns = g_sirx.PARAMS\n# Add the day. 
Note that we take the days from min_sample until the end of the array, as days\n# 0,1,2 are used for the first sampling in the block cross-validation\npar_block_cv[\"Day\"] = t_days[5:]\n# Explore formatted dataframe for parametric analysis\npar_block_cv.head(len(p_list))", "_____no_output_____" ], [ "plt.figure(figsize=[5, 5])\nax = plt.axes()\nax.tick_params(axis=\"both\", which=\"major\", labelsize=14)\nplt.plot(mse_list[0], \"ro\")\nplt.xlabel(\"Number of days used to predict the next day\", size=14)\nplt.ylabel(\"MSE\", size=14)\nplt.show()", "_____no_output_____" ] ], [ [ "There is an outlier on day 1, as this is when the missing date starts. A more reliable approach would be to take the last 8 values of the mean squared error to calculate a new average assuming that there will be no more missing data.", "_____no_output_____" ], [ "#### Variation of fitted parameters\n\nFinally, it is possible to observe how the model parameters change as more days and number of confirmed cases are introduced in the block cross validation. ", "_____no_output_____" ], [ "It is clear to observe that after day 15 all parameters except kappa begin to converge. Therefore, care must be taken when performing inference over the parameter kappa.", "_____no_output_____" ], [ "### Long term prediction\nNow we can use the model to predict when the peak will occur and what will be the maximum number of infected", "_____no_output_____" ] ], [ [ "# Predict\nplt.figure(figsize=[6, 6])\nax = plt.axes()\nax.tick_params(axis=\"both\", which=\"major\", labelsize=14)\ng_sirx.solve(40, 41)\n# Plot\nplt.plot(g_sirx.fetch()[:, 4], \"b-\", linewidth=2) # X(t)\nplt.plot(g_sirx.fetch()[:, 2], \"b--\", linewidth=2) # I(t)\nplt.xlabel(\"Day\", size=14)\nplt.ylabel(\"Number of people\", size=14)\nplt.legend([\"X(t): Confirmed\", \"I(t) = Infected\"], fontsize=13)\nplt.title(city_name)\nplt.show()", "_____no_output_____" ] ], [ [ "The model was trained with a limited amount of data. 
It is clear to observe that since the measures took place in Guangdong, at least 6 weeks of quarantine were necessary to control the pandemics. Note that a limitation of this model is that it predicts an equilibrium where the number of infected, denoted by the yellow line in the figure above, is 0 after a short time. In reality, this amount will decrease to a small number.\n\nAfter the peak of infections is reached, it is necessary to keep the quarantine and effective contact tracing for at least 30 days more.", "_____no_output_____" ], [ "### Validate long term plot using model.plot()\n\nThe function `model.plot()` offers a handy way to visualize model fitting and predictions. Custom visualizations can be validated against the `model.plot()` function.", "_____no_output_____" ] ], [ [ "g_sirx.plot()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ] ]
4aa46a2bd6af3fae1a8c59b4067a85e5687d9dff
338,993
ipynb
Jupyter Notebook
03_classification-exercises.ipynb
jpthompson18/handson-ml-practice
bca4689576e44943d4d45998f640c7309b5c0308
[ "Apache-2.0" ]
null
null
null
03_classification-exercises.ipynb
jpthompson18/handson-ml-practice
bca4689576e44943d4d45998f640c7309b5c0308
[ "Apache-2.0" ]
null
null
null
03_classification-exercises.ipynb
jpthompson18/handson-ml-practice
bca4689576e44943d4d45998f640c7309b5c0308
[ "Apache-2.0" ]
null
null
null
175.09969
71,936
0.908739
[ [ [ "**Chapter 3 – Classification**\n\n_This notebook contains all the sample code and solutions to the exercises in chapter 3._", "_____no_output_____" ], [ "# Setup", "_____no_output_____" ], [ "First, let's make sure this notebook works well in both python 2 and 3, import a few common modules, ensure MatplotLib plots figures inline and prepare a function to save the figures:", "_____no_output_____" ] ], [ [ "# To support both python 2 and python 3\nfrom __future__ import division, print_function, unicode_literals\n\n# Common imports\nimport numpy as np\nimport os\n\n# to make this notebook's output stable across runs\nnp.random.seed(42)\n\n# To plot pretty figures\n%matplotlib inline\nimport matplotlib\nimport matplotlib.pyplot as plt\nplt.rcParams['axes.labelsize'] = 14\nplt.rcParams['xtick.labelsize'] = 12\nplt.rcParams['ytick.labelsize'] = 12\n\n# Where to save the figures\nPROJECT_ROOT_DIR = \".\"\nCHAPTER_ID = \"classification\"\n\ndef save_fig(fig_id, tight_layout=True):\n path = os.path.join(PROJECT_ROOT_DIR, \"images\", CHAPTER_ID, fig_id + \".png\")\n print(\"Saving figure\", fig_id)\n if tight_layout:\n plt.tight_layout()\n plt.savefig(path, format='png', dpi=300)", "_____no_output_____" ] ], [ [ "# MNIST", "_____no_output_____" ] ], [ [ "from sklearn.datasets import fetch_mldata\ntry:\n mnist = fetch_mldata('MNIST original')\nexcept Exception as ex: \n from six.moves import urllib\n from scipy.io import loadmat\n import os\n\n mnist_path = os.path.join(\".\", \"datasets\", \"mnist-original.mat\")\n\n # download dataset from github.\n mnist_alternative_url = \"https://github.com/amplab/datascience-sp14/raw/master/lab7/mldata/mnist-original.mat\"\n response = urllib.request.urlopen(mnist_alternative_url)\n with open(mnist_path, \"wb\") as f:\n content = response.read()\n f.write(content)\n\n mnist_raw = loadmat(mnist_path)\n mnist = {\n \"data\": mnist_raw[\"data\"].T,\n \"target\": mnist_raw[\"label\"][0],\n \"COL_NAMES\": [\"label\", \"data\"],\n 
\"DESCR\": \"mldata.org dataset: mnist-original\",\n }\n print(\"Done!\")", "_____no_output_____" ], [ "X, y = mnist[\"data\"], mnist[\"target\"]\nX.shape", "_____no_output_____" ], [ "y.shape", "_____no_output_____" ], [ "28*28", "_____no_output_____" ], [ "%matplotlib inline\nimport matplotlib\nimport matplotlib.pyplot as plt\n\nsome_digit = X[36000]\nsome_digit_image = some_digit.reshape(28, 28)\nplt.imshow(some_digit_image, cmap = matplotlib.cm.binary,\n interpolation=\"nearest\")\nplt.axis(\"off\")\n\nsave_fig(\"some_digit_plot\")\nplt.show()", "Saving figure some_digit_plot\n" ], [ "def plot_digit(data):\n image = data.reshape(28, 28)\n plt.imshow(image, cmap = matplotlib.cm.binary,\n interpolation=\"nearest\")\n plt.axis(\"off\")", "_____no_output_____" ], [ "# EXTRA\ndef plot_digits(instances, images_per_row=10, **options):\n size = 28\n images_per_row = min(len(instances), images_per_row)\n images = [instance.reshape(size,size) for instance in instances]\n n_rows = (len(instances) - 1) // images_per_row + 1\n row_images = []\n n_empty = n_rows * images_per_row - len(instances)\n images.append(np.zeros((size, size * n_empty)))\n for row in range(n_rows):\n rimages = images[row * images_per_row : (row + 1) * images_per_row]\n row_images.append(np.concatenate(rimages, axis=1))\n image = np.concatenate(row_images, axis=0)\n plt.imshow(image, cmap = matplotlib.cm.binary, **options)\n plt.axis(\"off\")", "_____no_output_____" ], [ "plt.figure(figsize=(9,9))\nexample_images = np.r_[X[:12000:600], X[13000:30600:600], X[30600:60000:590]]\nplot_digits(example_images, images_per_row=10)\nsave_fig(\"more_digits_plot\")\nplt.show()", "Saving figure more_digits_plot\n" ], [ "y[36000]", "_____no_output_____" ], [ "X_train, X_test, y_train, y_test = X[:60000], X[60000:], y[:60000], y[60000:]", "_____no_output_____" ], [ "import numpy as np\n\nshuffle_index = np.random.permutation(60000)\nX_train, y_train = X_train[shuffle_index], y_train[shuffle_index]", 
"_____no_output_____" ] ], [ [ "# Binary classifier", "_____no_output_____" ] ], [ [ "y_train_5 = (y_train == 5)\ny_test_5 = (y_test == 5)", "_____no_output_____" ], [ "from sklearn.linear_model import SGDClassifier\n\nsgd_clf = SGDClassifier(max_iter=5, random_state=42)\nsgd_clf.fit(X_train, y_train_5)", "_____no_output_____" ], [ "sgd_clf.predict([some_digit])", "_____no_output_____" ], [ "from sklearn.model_selection import cross_val_score\ncross_val_score(sgd_clf, X_train, y_train_5, cv=3, scoring=\"accuracy\")", "_____no_output_____" ], [ "from sklearn.model_selection import StratifiedKFold\nfrom sklearn.base import clone\n\nskfolds = StratifiedKFold(n_splits=3, random_state=42)\n\nfor train_index, test_index in skfolds.split(X_train, y_train_5):\n clone_clf = clone(sgd_clf)\n X_train_folds = X_train[train_index]\n y_train_folds = (y_train_5[train_index])\n X_test_fold = X_train[test_index]\n y_test_fold = (y_train_5[test_index])\n\n clone_clf.fit(X_train_folds, y_train_folds)\n y_pred = clone_clf.predict(X_test_fold)\n n_correct = sum(y_pred == y_test_fold)\n print(n_correct / len(y_pred))", "0.9502\n0.96565\n0.96495\n" ], [ "from sklearn.base import BaseEstimator\nclass Never5Classifier(BaseEstimator):\n def fit(self, X, y=None):\n pass\n def predict(self, X):\n return np.zeros((len(X), 1), dtype=bool)", "_____no_output_____" ], [ "never_5_clf = Never5Classifier()\ncross_val_score(never_5_clf, X_train, y_train_5, cv=3, scoring=\"accuracy\")", "_____no_output_____" ], [ "from sklearn.model_selection import cross_val_predict\n\ny_train_pred = cross_val_predict(sgd_clf, X_train, y_train_5, cv=3)", "_____no_output_____" ], [ "from sklearn.metrics import confusion_matrix\n\nconfusion_matrix(y_train_5, y_train_pred)", "_____no_output_____" ], [ "y_train_perfect_predictions = y_train_5", "_____no_output_____" ], [ "confusion_matrix(y_train_5, y_train_perfect_predictions)", "_____no_output_____" ], [ "from sklearn.metrics import precision_score, 
recall_score\n\nprecision_score(y_train_5, y_train_pred)", "_____no_output_____" ], [ "4344 / (4344 + 1307)", "_____no_output_____" ], [ "recall_score(y_train_5, y_train_pred)", "_____no_output_____" ], [ "4344 / (4344 + 1077)", "_____no_output_____" ], [ "from sklearn.metrics import f1_score\nf1_score(y_train_5, y_train_pred)", "_____no_output_____" ], [ "4344 / (4344 + (1077 + 1307)/2)", "_____no_output_____" ], [ "y_scores = sgd_clf.decision_function([some_digit])\ny_scores", "_____no_output_____" ], [ "threshold = 0\ny_some_digit_pred = (y_scores > threshold)", "_____no_output_____" ], [ "y_some_digit_pred", "_____no_output_____" ], [ "threshold = 200000\ny_some_digit_pred = (y_scores > threshold)\ny_some_digit_pred", "_____no_output_____" ], [ "y_scores = cross_val_predict(sgd_clf, X_train, y_train_5, cv=3,\n method=\"decision_function\")", "_____no_output_____" ] ], [ [ "Note: there was an [issue](https://github.com/scikit-learn/scikit-learn/issues/9589) in Scikit-Learn 0.19.0 (fixed in 0.19.1) where the result of `cross_val_predict()` was incorrect in the binary classification case when using `method=\"decision_function\"`, as in the code above. The resulting array had an extra first dimension full of 0s. 
Just in case you are using 0.19.0, we need to add this small hack to work around this issue:", "_____no_output_____" ] ], [ [ "y_scores.shape", "_____no_output_____" ], [ "# hack to work around issue #9589 in Scikit-Learn 0.19.0\nif y_scores.ndim == 2:\n y_scores = y_scores[:, 1]", "_____no_output_____" ], [ "from sklearn.metrics import precision_recall_curve\n\nprecisions, recalls, thresholds = precision_recall_curve(y_train_5, y_scores)", "_____no_output_____" ], [ "def plot_precision_recall_vs_threshold(precisions, recalls, thresholds):\n plt.plot(thresholds, precisions[:-1], \"b--\", label=\"Precision\", linewidth=2)\n plt.plot(thresholds, recalls[:-1], \"g-\", label=\"Recall\", linewidth=2)\n plt.xlabel(\"Threshold\", fontsize=16)\n plt.legend(loc=\"upper left\", fontsize=16)\n plt.ylim([0, 1])\n\nplt.figure(figsize=(8, 4))\nplot_precision_recall_vs_threshold(precisions, recalls, thresholds)\nplt.xlim([-700000, 700000])\nsave_fig(\"precision_recall_vs_threshold_plot\")\nplt.show()", "Saving figure precision_recall_vs_threshold_plot\n" ], [ "(y_train_pred == (y_scores > 0)).all()", "_____no_output_____" ], [ "y_train_pred_90 = (y_scores > 70000)", "_____no_output_____" ], [ "precision_score(y_train_5, y_train_pred_90)", "_____no_output_____" ], [ "recall_score(y_train_5, y_train_pred_90)", "_____no_output_____" ], [ "def plot_precision_vs_recall(precisions, recalls):\n plt.plot(recalls, precisions, \"b-\", linewidth=2)\n plt.xlabel(\"Recall\", fontsize=16)\n plt.ylabel(\"Precision\", fontsize=16)\n plt.axis([0, 1, 0, 1])\n\nplt.figure(figsize=(8, 6))\nplot_precision_vs_recall(precisions, recalls)\nsave_fig(\"precision_vs_recall_plot\")\nplt.show()", "Saving figure precision_vs_recall_plot\n" ] ], [ [ "# ROC curves", "_____no_output_____" ] ], [ [ "from sklearn.metrics import roc_curve\n\nfpr, tpr, thresholds = roc_curve(y_train_5, y_scores)", "_____no_output_____" ], [ "def plot_roc_curve(fpr, tpr, label=None):\n plt.plot(fpr, tpr, linewidth=2, label=label)\n 
plt.plot([0, 1], [0, 1], 'k--')\n plt.axis([0, 1, 0, 1])\n plt.xlabel('False Positive Rate', fontsize=16)\n plt.ylabel('True Positive Rate', fontsize=16)\n\nplt.figure(figsize=(8, 6))\nplot_roc_curve(fpr, tpr)\nsave_fig(\"roc_curve_plot\")\nplt.show()", "Saving figure roc_curve_plot\n" ], [ "from sklearn.metrics import roc_auc_score\n\nroc_auc_score(y_train_5, y_scores)", "_____no_output_____" ], [ "from sklearn.ensemble import RandomForestClassifier\nforest_clf = RandomForestClassifier(random_state=42)\ny_probas_forest = cross_val_predict(forest_clf, X_train, y_train_5, cv=3,\n method=\"predict_proba\")", "_____no_output_____" ], [ "y_scores_forest = y_probas_forest[:, 1] # score = proba of positive class\nfpr_forest, tpr_forest, thresholds_forest = roc_curve(y_train_5,y_scores_forest)", "_____no_output_____" ], [ "plt.figure(figsize=(8, 6))\nplt.plot(fpr, tpr, \"b:\", linewidth=2, label=\"SGD\")\nplot_roc_curve(fpr_forest, tpr_forest, \"Random Forest\")\nplt.legend(loc=\"lower right\", fontsize=16)\nsave_fig(\"roc_curve_comparison_plot\")\nplt.show()", "Saving figure roc_curve_comparison_plot\n" ], [ "roc_auc_score(y_train_5, y_scores_forest)", "_____no_output_____" ], [ "y_train_pred_forest = cross_val_predict(forest_clf, X_train, y_train_5, cv=3)\nprecision_score(y_train_5, y_train_pred_forest)", "_____no_output_____" ], [ "recall_score(y_train_5, y_train_pred_forest)", "_____no_output_____" ] ], [ [ "# Multiclass classification", "_____no_output_____" ] ], [ [ "sgd_clf.fit(X_train, y_train)\nsgd_clf.predict([some_digit])", "_____no_output_____" ], [ "some_digit_scores = sgd_clf.decision_function([some_digit])\nsome_digit_scores", "_____no_output_____" ], [ "np.argmax(some_digit_scores)", "_____no_output_____" ], [ "sgd_clf.classes_", "_____no_output_____" ], [ "sgd_clf.classes_[5]", "_____no_output_____" ], [ "from sklearn.multiclass import OneVsOneClassifier\novo_clf = OneVsOneClassifier(SGDClassifier(max_iter=5, random_state=42))\novo_clf.fit(X_train, 
y_train)\novo_clf.predict([some_digit])", "_____no_output_____" ], [ "len(ovo_clf.estimators_)", "_____no_output_____" ], [ "forest_clf.fit(X_train, y_train)\nforest_clf.predict([some_digit])", "_____no_output_____" ], [ "forest_clf.predict_proba([some_digit])", "_____no_output_____" ], [ "cross_val_score(sgd_clf, X_train, y_train, cv=3, scoring=\"accuracy\")", "_____no_output_____" ], [ "from sklearn.preprocessing import StandardScaler\nscaler = StandardScaler()\nX_train_scaled = scaler.fit_transform(X_train.astype(np.float64))\ncross_val_score(sgd_clf, X_train_scaled, y_train, cv=3, scoring=\"accuracy\")", "_____no_output_____" ], [ "y_train_pred = cross_val_predict(sgd_clf, X_train_scaled, y_train, cv=3)\nconf_mx = confusion_matrix(y_train, y_train_pred)\nconf_mx", "_____no_output_____" ], [ "def plot_confusion_matrix(matrix):\n \"\"\"If you prefer color and a colorbar\"\"\"\n fig = plt.figure(figsize=(8,8))\n ax = fig.add_subplot(111)\n cax = ax.matshow(matrix)\n fig.colorbar(cax)", "_____no_output_____" ], [ "plt.matshow(conf_mx, cmap=plt.cm.gray)\nsave_fig(\"confusion_matrix_plot\", tight_layout=False)\nplt.show()", "Saving figure confusion_matrix_plot\n" ], [ "row_sums = conf_mx.sum(axis=1, keepdims=True)\nnorm_conf_mx = conf_mx / row_sums", "_____no_output_____" ], [ "np.fill_diagonal(norm_conf_mx, 0)\nplt.matshow(norm_conf_mx, cmap=plt.cm.gray)\nsave_fig(\"confusion_matrix_errors_plot\", tight_layout=False)\nplt.show()", "Saving figure confusion_matrix_errors_plot\n" ], [ "cl_a, cl_b = 3, 5\nX_aa = X_train[(y_train == cl_a) & (y_train_pred == cl_a)]\nX_ab = X_train[(y_train == cl_a) & (y_train_pred == cl_b)]\nX_ba = X_train[(y_train == cl_b) & (y_train_pred == cl_a)]\nX_bb = X_train[(y_train == cl_b) & (y_train_pred == cl_b)]\n\nplt.figure(figsize=(8,8))\nplt.subplot(221); plot_digits(X_aa[:25], images_per_row=5)\nplt.subplot(222); plot_digits(X_ab[:25], images_per_row=5)\nplt.subplot(223); plot_digits(X_ba[:25], images_per_row=5)\nplt.subplot(224); 
plot_digits(X_bb[:25], images_per_row=5)\nsave_fig(\"error_analysis_digits_plot\")\nplt.show()", "Saving figure error_analysis_digits_plot\n" ] ], [ [ "# Multilabel classification", "_____no_output_____" ] ], [ [ "from sklearn.neighbors import KNeighborsClassifier\n\n# y_train_large = (y_train >= 7)\n# y_train_odd = (y_train % 2 == 1)\n# y_multilabel = np.c_[y_train_large, y_train_odd]\n\n# knn_clf = KNeighborsClassifier()\n# knn_clf.fit(X_train, y_multilabel)", "_____no_output_____" ], [ "# knn_clf.predict([some_digit])", "_____no_output_____" ] ], [ [ "**Warning**: the following cell may take a very long time (possibly hours depending on your hardware).", "_____no_output_____" ] ], [ [ "# y_train_knn_pred = cross_val_predict(knn_clf, X_train, y_multilabel, cv=3, n_jobs=-1)\n# f1_score(y_multilabel, y_train_knn_pred, average=\"macro\")", "_____no_output_____" ] ], [ [ "# Multioutput classification", "_____no_output_____" ] ], [ [ "noise = np.random.randint(0, 100, (len(X_train), 784))\nX_train_mod = X_train + noise\nnoise = np.random.randint(0, 100, (len(X_test), 784))\nX_test_mod = X_test + noise\ny_train_mod = X_train\ny_test_mod = X_test", "_____no_output_____" ], [ "some_index = 5500\nplt.subplot(121); plot_digit(X_test_mod[some_index])\nplt.subplot(122); plot_digit(y_test_mod[some_index])\nsave_fig(\"noisy_digit_example_plot\")\nplt.show()", "Saving figure noisy_digit_example_plot\n" ], [ "# knn_clf.fit(X_train_mod, y_train_mod)\n# clean_digit = knn_clf.predict([X_test_mod[some_index]])\n# plot_digit(clean_digit)\n# save_fig(\"cleaned_digit_example_plot\")", "_____no_output_____" ] ], [ [ "# Extra material", "_____no_output_____" ], [ "## Dummy (ie. 
random) classifier", "_____no_output_____" ] ], [ [ "from sklearn.dummy import DummyClassifier\ndmy_clf = DummyClassifier()\ny_probas_dmy = cross_val_predict(dmy_clf, X_train, y_train_5, cv=3, method=\"predict_proba\")\ny_scores_dmy = y_probas_dmy[:, 1]", "_____no_output_____" ], [ "fprr, tprr, thresholdsr = roc_curve(y_train_5, y_scores_dmy)\nplot_roc_curve(fprr, tprr)", "_____no_output_____" ] ], [ [ "## KNN classifier", "_____no_output_____" ] ], [ [ "from sklearn.neighbors import KNeighborsClassifier\n# knn_clf = KNeighborsClassifier(n_jobs=2, weights='distance', n_neighbors=4)\n# knn_clf.fit(X_train, y_train)", "_____no_output_____" ], [ "# y_knn_pred = knn_clf.predict(X_test)", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score\n# accuracy_score(y_test, y_knn_pred)", "_____no_output_____" ], [ "from scipy.ndimage.interpolation import shift\ndef shift_digit(digit_array, dx, dy, new=0):\n return shift(digit_array.reshape(28, 28), [dy, dx], cval=new).reshape(784)\n\nplot_digit(shift_digit(some_digit, 5, 1, new=100))", "_____no_output_____" ], [ "X_train_expanded = [X_train]\ny_train_expanded = [y_train]\nfor dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1)):\n shifted_images = np.apply_along_axis(shift_digit, axis=1, arr=X_train, dx=dx, dy=dy)\n X_train_expanded.append(shifted_images)\n y_train_expanded.append(y_train)\n\nX_train_expanded = np.concatenate(X_train_expanded)\ny_train_expanded = np.concatenate(y_train_expanded)\nX_train_expanded.shape, y_train_expanded.shape", "_____no_output_____" ], [ "# knn_clf.fit(X_train_expanded, y_train_expanded)", "_____no_output_____" ], [ "# y_knn_expanded_pred = knn_clf.predict(X_test)", "_____no_output_____" ], [ "# accuracy_score(y_test, y_knn_expanded_pred)", "_____no_output_____" ], [ "# ambiguous_digit = X_test[2589]\n# knn_clf.predict_proba([ambiguous_digit])", "_____no_output_____" ], [ "# plot_digit(ambiguous_digit)", "_____no_output_____" ] ], [ [ "# Exercise solutions", "_____no_output_____" ], [ 
"# Exercise 1", "_____no_output_____" ] ], [ [ "from sklearn.model_selection import GridSearchCV\n\nknn2 = KNeighborsClassifier()\nsearch_space = [{\"weights\" : [\"uniform\", \"distance\"],\n \"n_neighbors\" : [2, 4, 6, 8]}]\nclf = GridSearchCV(knn2, search_space, cv=3, n_jobs=2)", "_____no_output_____" ], [ "best_model = clf.fit(X_train_scaled, y_train)", "_____no_output_____" ], [ "predictions = best_model.predict(X_train_scaled)", "_____no_output_____" ], [ "cross_val_score(best_model, X_train_scaled, y_train, cv=3, scoring=\"accuracy\")", "_____no_output_____" ] ], [ [ "# Exercise 2", "_____no_output_____" ] ], [ [ "def shift_image(arr, direction):\n direction_dict = {\"up\" : [-1, 0],\n \"down\" : [1, 0],\n \"left\" : [0, -1],\n \"right\" : [0, 1]}\n return shift(arr.reshape(28,28), direction_dict['direction']).reshape(784)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
4aa470cf067fe58b6fa8acb79b551029f325c56e
225,068
ipynb
Jupyter Notebook
notebooks/investigate_ha_cachito_fitting.ipynb
abostroem/asassn15oz
ade090096b61b155c86108d1945bb7b4522365b8
[ "BSD-3-Clause" ]
null
null
null
notebooks/investigate_ha_cachito_fitting.ipynb
abostroem/asassn15oz
ade090096b61b155c86108d1945bb7b4522365b8
[ "BSD-3-Clause" ]
3
2019-02-24T23:24:33.000Z
2019-02-24T23:25:12.000Z
notebooks/investigate_ha_cachito_fitting.ipynb
abostroem/asassn15oz
ade090096b61b155c86108d1945bb7b4522365b8
[ "BSD-3-Clause" ]
null
null
null
396.945326
50,664
0.936459
[ [ [ "import os\n\nimport numpy as np\nimport yaml\nfrom astropy.io import ascii as asc\nfrom astropy.time import Time\nimport astropy.units as u\nimport astropy.constants as c\nfrom astropy.modeling import models, fitting\n\nfrom matplotlib import pyplot as plt\n%matplotlib inline\n\nimport supernova", "_____no_output_____" ], [ "TEST_FILE_DIR = '../data/line_info/testing/'\nFIG_DIR = '../figures/'\nDATA_DIR = '../data/line_info'", "_____no_output_____" ], [ "HA = 6563.0\nSiII = 6355.0\nFeII = 5169.0\nIR_dates = Time(['2015-09-05','2015-10-05', '2015-10-10'])", "_____no_output_____" ], [ "sn15oz = supernova.LightCurve2('asassn-15oz')\ntexpl = Time(sn15oz.jdexpl, format='jd')", "_____no_output_____" ], [ "old_fitting = asc.read(os.path.join(TEST_FILE_DIR, 'HA-cachito_old.tab'))\nnew_fit_together = asc.read(os.path.join(TEST_FILE_DIR, 'HA-cachito.tab'))\nnew_fit_cachito = asc.read(os.path.join(TEST_FILE_DIR, 'cachito.tab'))\nnew_fit_HA = asc.read(os.path.join(TEST_FILE_DIR, 'HA.tab'))", "_____no_output_____" ], [ "def calc_velocity(obs_wl, rest_wl):\n velocity = c.c*(obs_wl/rest_wl - 1)\n return velocity", "_____no_output_____" ], [ "fig = plt.figure(figsize=[10, 5])\nax_HA = fig.add_subplot(1,2,1)\nax_cachito = fig.add_subplot(1,2,2)\n\nax_HA.plot((Time(old_fitting['date'])-texpl).value, -1*calc_velocity(old_fitting['vel1'], HA).to(u.km/u.s), 'o', label='old fit') \nax_HA.plot((Time(new_fit_together['date'])-texpl).value, -1*calc_velocity(new_fit_together['vel1'], HA).to(u.km/u.s), 's', label='new fit together') \nax_HA.plot((Time(new_fit_HA['date'])-texpl).value, -1*calc_velocity(new_fit_HA['vel0'], HA).to(u.km/u.s), '^', label='new fit separate') \nax_HA.set_ylim(7500, 12000)\nax_HA.set_xticks(np.arange(0, 90, 10))\nax_HA.legend()\nax_HA.set_title(r'H-$\\alpha$ Velocity')\n\nax_cachito.plot((Time(old_fitting['date'])-texpl).value, -1*calc_velocity(old_fitting['vel0'], HA).to(u.km/u.s), 'o', label='old fit') 
\nax_cachito.plot((Time(new_fit_together['date'])-texpl).value, -1*calc_velocity(new_fit_together['vel0'], HA).to(u.km/u.s), 's', label='new fit together') \nax_cachito.plot((Time(new_fit_cachito['date'])-texpl).value, -1*calc_velocity(new_fit_cachito['vel0'], HA).to(u.km/u.s), '^', label='new fit separate') \nax_cachito.set_xticks(np.arange(0, 90, 10))\nax_cachito.grid()\nax_cachito.set_title('Cachito Velocity (if Hydrogen)')\nax_cachito.vlines((IR_dates-texpl).value, linestyle='--', ymin=10000, ymax=20000, label='IR spectra')\nax_cachito.legend(loc='lower left')\nplt.savefig(os.path.join(FIG_DIR, 'HA-cachito_velocity_test.pdf'))", "_____no_output_____" ], [ "fig = plt.figure(figsize=[10, 5])\nax_HA = fig.add_subplot(1,1,1)\nax_cachito = ax_HA.twinx()\n\n#ax_HA.plot((Time(old_fitting['date'])-texpl).value, -1*calc_velocity(old_fitting['vel1'], HA).to(u.km/u.s), 'o', label='old fit') \n#ax_HA.plot((Time(new_fit_together['date'])-texpl).value, -1*calc_velocity(new_fit_together['vel1'], HA).to(u.km/u.s), 's', label='new fit together') \nax_HA.plot((Time(new_fit_HA['date'])-texpl).value, -1*calc_velocity(new_fit_HA['vel0'], HA).to(u.km/u.s), '^', label='new fit separate') \nax_HA.set_ylim(7500, 12000)\nax_HA.set_xticks(np.arange(0, 90, 10))\nax_HA.legend()\nax_HA.set_title(r'H-$\\alpha$/Cachito Velocity')\n\n#ax_cachito.plot((Time(old_fitting['date'])-texpl).value, -1*calc_velocity(old_fitting['vel0'], HA).to(u.km/u.s), 'co', label='old fit') \n#ax_cachito.plot((Time(new_fit_together['date'])-texpl).value, -1*calc_velocity(new_fit_together['vel0'], HA).to(u.km/u.s), 'rs', label='new fit together') \nax_cachito.plot((Time(new_fit_cachito['date'])-texpl).value, -1*calc_velocity(new_fit_cachito['vel0'], HA).to(u.km/u.s), '^', color='lime', label='new fit separate') \nax_cachito.set_xticks(np.arange(0, 90, 10))\nax_cachito.grid()\nax_cachito.vlines((IR_dates-texpl).value, linestyle='--', ymin=10000, ymax=20000, label='IR spectra')\nax_cachito.legend(loc='lower left')", 
"_____no_output_____" ] ], [ [ "# Fit Velocity", "_____no_output_____" ], [ "## Cachito Fit", "_____no_output_____" ] ], [ [ "phase_cachito = (Time(new_fit_cachito['date'])-texpl).value\nvelocity_cachito = -1*calc_velocity(new_fit_cachito['vel0'], HA).to(u.km/u.s).value", "_____no_output_____" ], [ "fitter_power = fitting.LevMarLSQFitter()\nfitter_linear = fitting.LinearLSQFitter()\npower_model = models.PowerLaw1D()\npoly_model3 = models.Polynomial1D(degree=3)\npoly_model4 = models.Polynomial1D(degree=4)\npoly_model5 = models.Polynomial1D(degree=5)", "_____no_output_____" ], [ "power_fit_cachito = fitter_power(power_model, phase_cachito, velocity_cachito)", "_____no_output_____" ], [ "poly_fit3_cachito = fitter_linear(poly_model3, phase_cachito, velocity_cachito)\npoly_fit4_cachito = fitter_linear(poly_model4, phase_cachito, velocity_cachito)\npoly_fit5_cachito = fitter_linear(poly_model5, phase_cachito, velocity_cachito)", "_____no_output_____" ], [ "fit_time = np.arange(1, phase_cachito[-1]+1,1)\n\nfig = plt.figure(figsize=[10, 5])\nax_cachito = fig.add_subplot(2,1,1)\nax_resid = fig.add_subplot(2,1,2, sharex=ax_cachito)\nax_cachito.plot(phase_cachito, velocity_cachito, '^', color='lime', label='new fit separate') \nax_cachito.set_xticks(np.arange(0, 90, 10))\nax_cachito.grid()\nax_cachito.plot(fit_time, power_fit_cachito(fit_time), label='Power Law')\nax_cachito.plot(fit_time, poly_fit4_cachito(fit_time), label='Polynomial deg={}'.format(poly_model4.degree))\nax_cachito.set_title('Cachito Velocity (if Hydrogen)')\nax_cachito.vlines((IR_dates-texpl).value, linestyle='--', ymin=12000, ymax=21000, label='IR spectra')\nax_cachito.set_ylabel('Velocity (km/s)')\n\nax_cachito.set_ylim(ymin=12000, ymax=21000)\nax_cachito.legend(loc='best')\n\nax_resid.axhline(0, color='k')\nax_resid.vlines((IR_dates-texpl).value, linestyle='--', ymin=-500, ymax=500, label='IR spectra')\nax_resid.plot(phase_cachito, velocity_cachito - power_fit_cachito(phase_cachito), 'o', 
label='Power')\nax_resid.plot(phase_cachito, velocity_cachito - poly_fit3_cachito(phase_cachito), 'o', label='deg3')\nax_resid.plot(phase_cachito, velocity_cachito - poly_fit4_cachito(phase_cachito), 'o', label='deg4')\nax_resid.plot(phase_cachito, velocity_cachito - poly_fit5_cachito(phase_cachito), 'o', label='deg5')\nax_resid.set_yticks([-500, -250, 0, 250, 500])\nax_resid.grid()\nax_resid.legend(loc='best', ncol=3)\nax_resid.set_ylabel('Residual (km/s)')\nax_resid.set_xlabel('Phase (days)')\n\nplt.savefig(os.path.join(FIG_DIR, 'cachito_velocity_fit.pdf'))\n\nprint('Power law std = {}'.format(np.std(velocity_cachito - power_fit_cachito(phase_cachito))))\nprint('Deg 4 polynomial std = {}'.format(np.std(velocity_cachito - poly_fit4_cachito(phase_cachito))))\nprint('Deg 3 polynomial std = {}'.format(np.std(velocity_cachito - poly_fit3_cachito(phase_cachito))))", "Power law std = 391.2385538728443\nDeg 4 polynomial std = 193.85033736603393\nDeg 3 polynomial std = 246.76558954290823\n" ] ], [ [ "Speaking with Stefano - we're going to use the power law fit; Nugent (2006) and Faran (2014) both fit power laws", "_____no_output_____" ], [ "## H-Alpha Fit", "_____no_output_____" ] ], [ [ "phase_HA = (Time(new_fit_HA['date'])-texpl).value\nvelocity_HA = -1*calc_velocity(new_fit_HA['vel0'], HA).to(u.km/u.s).value", "_____no_output_____" ], [ "fitter_power = fitting.LevMarLSQFitter()\nfitter_linear = fitting.LinearLSQFitter()\npower_model = models.PowerLaw1D()\npoly_model3 = models.Polynomial1D(degree=3)\npoly_model4 = models.Polynomial1D(degree=4)\npoly_model5 = models.Polynomial1D(degree=5)", "_____no_output_____" ], [ "power_fit_HA = fitter_power(power_model, phase_HA, velocity_HA)", "_____no_output_____" ], [ "poly_fit3_HA = fitter_linear(poly_model3, phase_HA, velocity_HA)\npoly_fit4_HA = fitter_linear(poly_model4, phase_HA, velocity_HA)\npoly_fit5_HA = fitter_linear(poly_model5, phase_HA, velocity_HA)", "_____no_output_____" ], [ "fit_time = np.arange(1, 
phase_HA[-1]+1,1)\n\nfig = plt.figure(figsize=[10, 5])\nax_HA = fig.add_subplot(2,1,1)\nax_resid = fig.add_subplot(2,1,2, sharex=ax_HA)\nax_HA.plot(phase_HA, velocity_HA, '^', color='lime', label='new fit separate') \nax_HA.set_xticks(np.arange(0, 90, 10))\nax_HA.grid()\nax_HA.plot(fit_time, power_fit_HA(fit_time), label='Power Law')\nax_HA.plot(fit_time, poly_fit4_HA(fit_time), label='Polynomial deg={}'.format(poly_model4.degree))\nax_HA.set_title('HA Velocity (if Hydrogen)')\nax_HA.vlines((IR_dates-texpl).value, linestyle='--', ymin=8000, ymax=12000, label='IR spectra')\nax_HA.set_ylim(ymin=8000, ymax=12000)\nax_HA.legend(loc='best')\nax_HA.set_ylabel('velocity (km/s)')\n\nax_resid.axhline(0, color='k')\nax_resid.vlines((IR_dates-texpl).value, linestyle='--', ymin=-500, ymax=500, label='IR spectra')\nax_resid.plot(phase_HA, velocity_HA - power_fit_HA(phase_HA), 'o', label='Power')\nax_resid.plot(phase_HA, velocity_HA - poly_fit3_HA(phase_HA), 'o', label='deg3')\nax_resid.plot(phase_HA, velocity_HA - poly_fit4_HA(phase_HA), 'o', label='deg4')\nax_resid.plot(phase_HA, velocity_HA - poly_fit5_HA(phase_HA), 'o', label='deg5')\nax_resid.grid()\nax_resid.legend(loc='best', ncol=2)\nax_resid.set_xlabel('Phase (days)')\nax_resid.set_ylabel('Residual')\n\nprint('Power law std = {}'.format(np.std(velocity_HA - power_fit_HA(phase_HA))))\nprint('Deg 4 polynomial std = {}'.format(np.std(velocity_HA - poly_fit4_HA(phase_HA))))\nprint('Deg 3 polynomial std = {}'.format(np.std(velocity_HA - poly_fit3_HA(phase_HA))))\nplt.savefig(os.path.join(FIG_DIR, 'HA_velocity_fit.pdf'))", "Power law std = 215.510555052082\nDeg 4 polynomial std = 209.33691258591205\nDeg 3 polynomial std = 221.00547094202278\n" ] ], [ [ "# Look at Silicon Velocity and fit the FeII Velocity", "_____no_output_____" ] ], [ [ "tbdata_feII = asc.read(os.path.join(DATA_DIR, 'FeII_multi.tab'))\ntbdata_feII.remove_columns(['vel1', 'vel_err_left_1', 'vel_err_right_1', 'vel_pew_1', 
'vel_pew_err1'])\ntbdata_feII.rename_column('vel0', 'velocity')\ntbdata_feII.rename_column('vel_err_left_0', 'vel_err_left')\ntbdata_feII.rename_column('vel_err_right_0', 'vel_err_right')\ntbdata_feII.rename_column('vel_pew_0', 'pew')\ntbdata_feII.rename_column('vel_pew_err0', 'pew_err')", "_____no_output_____" ], [ "phase_feII = (Time(tbdata_feII['date'])-texpl).value\nvelocity_feII = -1*calc_velocity(tbdata_feII['velocity'], FeII).to(u.km/u.s)", "_____no_output_____" ], [ "power_model_feII = models.PowerLaw1D(alpha=power_fit_cachito.alpha, x_0=power_fit_cachito.x_0)\npower_fit_feII = fitter_power(power_model_feII, phase_feII, velocity_feII)", "_____no_output_____" ], [ "fig = plt.figure(figsize=[10, 5])\nax_Fe = fig.add_subplot(2,1,1)\nax_resid = fig.add_subplot(2,1,2, sharex=ax_Fe)\n\nax_Fe.plot(phase_feII, velocity_feII, '^', label='FeII (5169)') \nax_Fe.plot((Time(new_fit_cachito['date'])-texpl).value, -1*calc_velocity(new_fit_cachito['vel0'], SiII).to(u.km/u.s), '^', label='Cachito (as SiII 6533)')\nax_Fe.plot(fit_time, power_fit_feII(fit_time))\nax_Fe.vlines((IR_dates-texpl).value, linestyle='--', ymin=-3000, ymax=12000, label='IR spectra')\nax_Fe.set_xticks(np.arange(0, 90, 10))\nax_Fe.legend()\nax_Fe.set_title(r'FeII 5169 Velocity')\nax_Fe.set_ylim(3000, 11000)\n\nax_resid.axhline(0, color='k')\nax_resid.plot(phase_feII, velocity_feII - power_fit_feII(phase_feII), 'o')\nax_resid.set_yticks([-500, -250, 0, 250, 500])\nax_resid.grid()\nax_resid.vlines((IR_dates-texpl).value, linestyle='--', ymin=-500, ymax=500, label='IR spectra')\nprint('Power law std = {}'.format(np.std(velocity_feII - power_fit_feII(phase_feII))))", "Power law std = 342.44842167279404 km / s\n" ], [ "fig = plt.figure()\nax_Fe = fig.add_subplot(1,1,1)\n\nax_Fe.plot((Time(new_fit_cachito['date'])-texpl).value, -1*calc_velocity(new_fit_cachito['vel0'], SiII).to(u.km/u.s), '^', label='Cachito (as SiII 6533)')\n#ax_Fe.plot((Time(new_fit_cachito['date'])-texpl).value, 
-1*calc_velocity(new_fit_together['vel0'], SiII).to(u.km/u.s), '^', label='Cachito (as SiII 6533); new joint fit', alpha=0.25)\n#ax_Fe.plot((Time(new_fit_cachito['date'])-texpl).value, -1*calc_velocity(old_fitting['vel0'], SiII).to(u.km/u.s), '^', label='Cachito (as SiII 6533); old joint fit', alpha=0.25)\nax_Fe.plot(phase_feII, velocity_feII, 'o', label='FeII (5169)') \n\nax_Fe.set_xticks(np.arange(0, 90, 10))\nax_Fe.legend()\nax_Fe.set_title(r'FeII 5169 Velocity')\nax_Fe.set_ylim(5000, 11000)\nax_Fe.set_xlim(0, 40)\nax_Fe.set_xlabel('Phase (days)')\nax_Fe.set_ylabel('Velocity (km/s)')\nplt.savefig(os.path.join(FIG_DIR, 'cachito_fe_vel_comp.pdf'))", "_____no_output_____" ], [ "cp ../figures/cachito_fe_vel_comp.pdf ../paper/figures/", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ] ]
4aa472b94b9ce5b94672a6f9c5cbac9da332a2a8
25,043
ipynb
Jupyter Notebook
QuestionAnswering/SQuAD_training/huggingface-bert-japanese.ipynb
AtsunoriFujita/sagemaker_nlp_examples
4d818586d9e2cbac750936dc5f44d967ca7a57e7
[ "Apache-2.0" ]
11
2021-08-13T06:26:57.000Z
2022-03-08T03:28:03.000Z
QuestionAnswering/SQuAD_training/huggingface-bert-japanese.ipynb
AtsunoriFujita/sagemaker_nlp_examples
4d818586d9e2cbac750936dc5f44d967ca7a57e7
[ "Apache-2.0" ]
null
null
null
QuestionAnswering/SQuAD_training/huggingface-bert-japanese.ipynb
AtsunoriFujita/sagemaker_nlp_examples
4d818586d9e2cbac750936dc5f44d967ca7a57e7
[ "Apache-2.0" ]
null
null
null
30.355152
346
0.579563
[ [ [ "# Huggingface SageMaker-SDK - BERT Japanese QA example", "_____no_output_____" ], [ "1. [Introduction](#Introduction) \n2. [Development Environment and Permissions](#Development-Environment-and-Permissions)\n 1. [Installation](#Installation) \n 2. [Permissions](#Permissions)\n 3. [Uploading data to sagemaker_session_bucket](#Uploading-data-to-sagemaker_session_bucket) \n3. [(Optional) Deepen your understanding of SQuAD](#(Optional)-Deepen-your-understanding-of-SQuAD) \n4. [Fine-tuning & starting Sagemaker Training Job](#Fine-tuning-\\&-starting-Sagemaker-Training-Job) \n 1. [Creating an Estimator and start a training job](#Creating-an-Estimator-and-start-a-training-job) \n 2. [Estimator Parameters](#Estimator-Parameters) \n 3. [Download fine-tuned model from s3](#Download-fine-tuned-model-from-s3)\n 4. [Question Answering on Local](#Question-Answering-on-Local) \n5. [_Coming soon_:Push model to the Hugging Face hub](#Push-model-to-the-Hugging-Face-hub)", "_____no_output_____" ], [ "# Introduction\n\nこのnotebookはHuggingFaceの[run_squad.py](https://github.com/huggingface/transformers/blob/master/examples/legacy/question-answering/run_squad.py)を日本語データで動作する様に変更を加えたものです。 \nデータは[運転ドメインQAデータセット](https://nlp.ist.i.kyoto-u.ac.jp/index.php?Driving%20domain%20QA%20datasets)を使用します。 \n\nこのデモでは、AmazonSageMakerのHuggingFace Estimatorを使用してSageMakerのトレーニングジョブを実行します。 \n\n_**NOTE: このデモは、SagemakerNotebookインスタンスで動作検証しています**_ \n _**データセットは各自許諾に同意の上ダウンロードしていただけますようお願いいたします(データサイズは約4MBです)**_", "_____no_output_____" ], [ "# Development Environment and Permissions", "_____no_output_____" ], [ "## Installation\n\nこのNotebookはSageMakerの`conda_pytorch_p36`カーネルを利用しています。 \n日本語処理のため、`transformers`ではなく`transformers[ja]`をインスールします。\n\n**_Note: このnotebook上で推論テストを行う場合、(バージョンが古い場合は)pytorchのバージョンアップが必要になります。_**", "_____no_output_____" ] ], [ [ "# localで推論のテストを行う場合\n!pip install torch==1.7.1", "_____no_output_____" ], [ "!pip install \"sagemaker>=2.31.0\" \"transformers[ja]==4.6.1\" 
\"datasets[s3]==1.6.2\" --upgrade", "_____no_output_____" ] ], [ [ "## Permissions", "_____no_output_____" ], [ "ローカル環境でSagemakerを使用する場合はSagemakerに必要な権限を持つIAMロールにアクセスする必要があります。[こちら](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html)を参照してください", "_____no_output_____" ] ], [ [ "import sagemaker\n\n\nsess = sagemaker.Session()\n# sagemaker session bucket -> used for uploading data, models and logs\n# sagemaker will automatically create this bucket if it not exists\nsagemaker_session_bucket=None\nif sagemaker_session_bucket is None and sess is not None:\n # set to default bucket if a bucket name is not given\n sagemaker_session_bucket = sess.default_bucket()\n\nrole = sagemaker.get_execution_role()\nsess = sagemaker.Session(default_bucket=sagemaker_session_bucket)\n\nprint(f\"sagemaker role arn: {role}\")\nprint(f\"sagemaker bucket: {sess.default_bucket()}\")\nprint(f\"sagemaker session region: {sess.boto_region_name}\")", "_____no_output_____" ] ], [ [ "# データの準備\n\n事前にデータ(`DDQA-1.0.tar.gz`)をこのnotobookと同じ階層に配置してください\n\n以下では、データをダウンロードして解凍 (unzip) します。", "_____no_output_____" ] ], [ [ "# Unzip\n\n!tar -zxvf DDQA-1.0.tar.gz", "_____no_output_____" ] ], [ [ "## Uploading data to `sagemaker_session_bucket`\n\nS3へデータをアップロードします。", "_____no_output_____" ] ], [ [ "s3_prefix = 'samples/datasets/driving-domain-qa'\n\ninput_train = sess.upload_data(\n path='./DDQA-1.0/RC-QA/DDQA-1.0_RC-QA_train.json', \n key_prefix=f'{s3_prefix}/train'\n)\n\ninput_validation = sess.upload_data(\n path='./DDQA-1.0/RC-QA/DDQA-1.0_RC-QA_dev.json', \n key_prefix=f'{s3_prefix}/valid'\n)", "_____no_output_____" ], [ "# データのUpload path\n\nprint(input_train)\nprint(input_validation)", "_____no_output_____" ] ], [ [ "# (Optional) Deepen your understanding of SQuAD\n\n**このセクションはオプションであり、Fine-tuning & starting Sagemaker Training Jobまでスキップできます**", "_____no_output_____" ], [ "## 運転ドメインQAデータセットについて\n\n運転ドメインQAデータセットはSQuAD2.0形式となっており、`run_squad.py`でそのまま実行できます。 
\nトレーニングジョブの実行とは関連しませんが、ここでは少しデータについて理解を深めたいと思います。", "_____no_output_____" ], [ "QAデータセットの形式(README_ja.txt)\n--------------------\n\n本QAデータセットの形式はSQuAD2.0と同じです。SQuAD2.0の問題は、「文章」、「質問」、「答え」の三つ組になっており、「答え」は「文章」の中の一部になっています。一部の問題は、「文章」の中に「答え」が無いなど、答えられない問題になっています。詳細は以下の論文をご参照ください。\n\nPranav Rajpurkar, Robin Jia, and Percy Liang.\nKnow what you don’t know: Unanswerable questions for SQuAD,\nIn ACL2018, pages 784–789.\nhttps://www.aclweb.org/anthology/P18-2124.pdf\n\n以下に、jsonファイル中のQAデータセットを例示します。 \n注)jsonファイル中の\"context\"は「文章」\n\n```json\n{\n \"version\": \"v2.0\",\n \"data\": [\n {\n \"title\": \"運転ドメイン\",\n \"paragraphs\": [\n {\n \"context\": \"著者は以下の文章を書きました。本日お昼頃、梅田方面へ自転車で出かけました。ちょっと大きな交差点に差し掛かりました。自転車にまたがった若い女性が信号待ちしています。その後で私も止まって信号が青になるのを待っていました。\",\n \"qas\": [\n {\n \"id\": \"55604556390008_00\",\n \"question\": \"待っていました、の主語は何か?\",\n \"answers\": [\n {\n \"text\": \"私\",\n \"answer_start\": 85\n },\n {\n \"text\": \"著者\",\n \"answer_start\": 0\n }\n ],\n \"is_impossible\": false\n }\n ]\n }\n ]\n }\n ]\n}\n```\n\n参考文献\n--------\n\n高橋 憲生、柴田 知秀、河原 大輔、黒橋 禎夫\nドメインを限定した機械読解モデルに基づく述語項構造解析\n言語処理学会 第25回年次大会 発表論文集 (2019年3月)\n https://www.anlp.jp/proceedings/annual_meeting/2019/pdf_dir/B1-4.pdf \n  ※データセットの構築方法について記載\n\nNorio Takahashi, Tomohide Shibata, Daisuke Kawahara and Sadao Kurohashi.\nMachine Comprehension Improves Domain-Specific Japanese Predicate-Argument Structure Analysis,\nIn Proceedings of 2019 Conference on Empirical Methods in Natural Language Processing and 9th International Joint Conference on Natural Language Processing, Workshop MRQA: Machine Reading for Question Answering, 2019.\n https://mrqa.github.io/assets/papers/42_Paper.pdf \n  ※データセットの構築方法、文章中に答えが無い問題について記載", "_____no_output_____" ] ], [ [ "# データの読み込み\nimport json\n\nwith open(\"./DDQA-1.0/RC-QA/DDQA-1.0_RC-QA_train.json\", \"r\") as f:\n squad = json.load(f)", "_____no_output_____" ], [ "squad['data'][0]['paragraphs'][0]", "_____no_output_____" ] ], [ [ "SQuAD2.0形式は少し複雑なjson形式となっています。 
\n次に`run_squad.py`内でどのような前処理が実行されているかについて少し触れます。 \n\nこのparagraphsにはコンテキストが1つと質問が2つ、回答が6つ含まれていますが、後の処理ではここから \n**2つの「コンテキスト」、「質問」、「答え」の三つ組**が作成されます。 \n回答は1番目のものが使用されます。", "_____no_output_____" ] ], [ [ "from transformers.data.processors.squad import SquadV2Processor\nfrom transformers import squad_convert_examples_to_features\n\ndata_dir = './DDQA-1.0/RC-QA'\ntrain_file = 'DDQA-1.0_RC-QA_train.json'\n\nmax_seq_length = 384 # トークン化後の最大入力シーケンス長。これより長いシーケンスは切り捨てられ、これより短いシーケンスはパディングされます\ndoc_stride = 128 # 長いドキュメントをチャンクに分割する場合、チャンク間でどのくらいのストライドを取るか\nmax_query_length = 64 # 質問のトークンの最大数。 これより長い質問はこの長さに切り捨てられます\nthreads = 1", "_____no_output_____" ], [ "from transformers import AutoTokenizer\n\n# Tokenizer\ntokenizer = AutoTokenizer.from_pretrained('cl-tohoku/bert-base-japanese-whole-word-masking') ", "_____no_output_____" ], [ "# jsonファイルを読みこみ、複雑な構造を分解します\n\nprocessor = SquadV2Processor()\nexamples = processor.get_train_examples(data_dir, filename=train_file)", "_____no_output_____" ], [ "# QuestionAnsweringモデルへ入力できるようにトークナイズします\n# 以下の実行に数分時間がかかります\n\nfeatures, dataset = squad_convert_examples_to_features(\n examples=examples,\n tokenizer=tokenizer,\n max_seq_length=max_seq_length,\n doc_stride=doc_stride,\n max_query_length=max_query_length,\n is_training=True,\n return_dataset=\"pt\",\n threads=threads,\n)", "_____no_output_____" ] ], [ [ "`dataset`は後に`dataloader`に渡され、以下のように使用されます。\n\n\n```python\nfor _ in train_iterator:\n epoch_iterator = tqdm(train_dataloader, desc=\"Iteration\", disable=args.local_rank not in [-1, 0])\n for step, batch in enumerate(epoch_iterator):\n\n # Skip past any already trained steps if resuming training\n if steps_trained_in_current_epoch > 0:\n steps_trained_in_current_epoch -= 1\n continue\n\n model.train()\n batch = tuple(t.to(args.device) for t in batch)\n\n inputs = {\n \"input_ids\": batch[0],\n \"attention_mask\": batch[1],\n \"token_type_ids\": batch[2],\n \"start_positions\": batch[3],\n \"end_positions\": batch[4],\n 
}\n```\n\n`input_ids`, `attention_mask`, `token_type_ids`はTransformerベースのモデルで一般的な入力形式です \nQuestionAnsweringモデル特有のものとして`start_positions`, `end_positions`が挙げられます", "_____no_output_____" ] ], [ [ "# 参考に一つ目の中身を見てみます\n\ni = 0\ndataset[i]", "_____no_output_____" ], [ "# すでに テキスト→トークン化→ID化されているため、逆の操作で元に戻します。\n# 質問と文章が含まれていることが確認できます\n\ntokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(dataset[i][0]))", "_____no_output_____" ], [ "# ID化→トークン化まで\n\ntokenizer.convert_ids_to_tokens(dataset[i][0])", "_____no_output_____" ], [ "# 回答は、start_positionsのトークンで始まり、end_positionsでトークンで終わるように表現されます\n# 試しに該当箇所のトークンを文字に戻してみます。\n\nprint(tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens([dataset[i][0][dataset[i][3]]])))\nprint(tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens([dataset[i][0][dataset[i][4]]])))", "_____no_output_____" ] ], [ [ "これから実行する`QuestionAnswering`は、**「コンテキスト」**内から**「質問」**に対する**「答え」**となる`start_positions`と`end_positions`を予測し、そのスパンを抽出するタスクとなります。", "_____no_output_____" ], [ "# Fine-tuning & starting Sagemaker Training Job\n\n`HuggingFace`のトレーニングジョブを作成するためには`HuggingFace` Estimatorが必要になります。 \nEstimatorは、エンドツーエンドのAmazonSageMakerトレーニングおよびデプロイタスクを処理します。 Estimatorで、どのFine-tuningスクリプトを`entry_point`として使用するか、どの`instance_type`を使用するか、どの`hyperparameters`を渡すかなどを定義します。\n\n\n```python\nhuggingface_estimator = HuggingFace(\n entry_point='train.py',\n source_dir='./scripts',\n base_job_name='huggingface-sdk-extension',\n instance_type='ml.p3.2xlarge',\n instance_count=1,\n transformers_version='4.4',\n pytorch_version='1.6',\n py_version='py36',\n role=role,\n hyperparameters={\n 'epochs': 1,\n 'train_batch_size': 32,\n 'model_name':'distilbert-base-uncased'\n }\n)\n```\n\nSageMakerトレーニングジョブを作成すると、SageMakerは`huggingface`コンテナを実行するために必要なec2インスタンスの起動と管理を行います。 \nFine-tuningスクリプト`train.py`をアップロードし、`sagemaker_session_bucket`からコンテナ内の`/opt/ml/input/data`にデータをダウンロードして、トレーニングジョブを実行します。\n\n\n```python\n/opt/conda/bin/python train.py --epochs 1 
--model_name distilbert-base-uncased --train_batch_size 32\n```\n\n`HuggingFace estimator`で定義した`hyperparameters`は、名前付き引数として渡されます。\n\nまたSagemakerは、次のようなさまざまな環境変数を通じて、トレーニング環境に関する有用なプロパティを提供しています。\n\n* `SM_MODEL_DIR`:トレーニングジョブがモデルアーティファクトを書き込むパスを表す文字列。トレーニング後、このディレクトリのアーティファクトはモデルホスティングのためにS3にアップロードされます。\n\n* `SM_NUM_GPUS`:ホストで使用可能なGPUの数を表す整数。\n\n* `SM_CHANNEL_XXXX`:指定されたチャネルの入力データを含むディレクトリへのパスを表す文字列。たとえば、HuggingFace estimatorのfit呼び出しで`train`と`test`という名前の2つの入力チャネルを指定すると、環境変数`SM_CHANNEL_TRAIN`と`SM_CHANNEL_TEST`が設定されます。\n\nこのトレーニングジョブをローカル環境で実行するには、`instance_type='local'`、GPUの場合は`instance_type='local_gpu'`で定義できます。 \n**_Note:これはSageMaker Studio内では機能しません_**", "_____no_output_____" ] ], [ [ "# requirements.txtはトレーニングジョブの実行前に実行されます(コンテナにライブラリを追加する際に使用します)\n# 残念なことにSageMakerのHuggingFaceコンテナは日本語処理(トークナイズ)に必要なライブラリが組み込まれていません\n# したがってtransformers[ja]==4.6.1をジョブ実行前にインストールしています(fugashiとipadic)でも構いません\n# tensorboardも組み込まれていないため、インストールします\n\n!pygmentize ./scripts/requirements.txt", "_____no_output_____" ], [ "# トレーニングジョブで実行されるコード\n!pygmentize ./scripts/run_squad.py", "_____no_output_____" ], [ "from sagemaker.huggingface import HuggingFace\n\n\n# hyperparameters, which are passed into the training job\nhyperparameters={\n 'model_type': 'bert',\n 'model_name_or_path': 'cl-tohoku/bert-base-japanese-whole-word-masking',\n 'output_dir': '/opt/ml/model',\n 'data_dir':'/opt/ml/input/data',\n 'train_file': 'train/DDQA-1.0_RC-QA_train.json',\n 'predict_file': 'validation/DDQA-1.0_RC-QA_dev.json',\n 'version_2_with_negative': 'True',\n 'do_train': 'True',\n 'do_eval': 'True',\n 'fp16': 'True',\n 'per_gpu_train_batch_size': 16,\n 'per_gpu_eval_batch_size': 16,\n 'max_seq_length': 384,\n 'doc_stride': 128,\n 'max_query_length': 64,\n 'learning_rate': 5e-5,\n 'num_train_epochs': 2,\n #'max_steps': 100, # If > 0: set total number of training steps to perform. 
Override num_train_epochs.\n 'save_steps': 1000, \n}\n\n# metric definition to extract the results\nmetric_definitions=[\n {\"Name\": \"train_runtime\", \"Regex\": \"train_runtime.*=\\D*(.*?)$\"},\n {'Name': 'train_samples_per_second', 'Regex': \"train_samples_per_second.*=\\D*(.*?)$\"},\n {'Name': 'epoch', 'Regex': \"epoch.*=\\D*(.*?)$\"},\n {'Name': 'f1', 'Regex': \"f1.*=\\D*(.*?)$\"},\n {'Name': 'exact_match', 'Regex': \"exact_match.*=\\D*(.*?)$\"}]", "_____no_output_____" ] ], [ [ "## Creating an Estimator and start a training job", "_____no_output_____" ] ], [ [ "# estimator\n\nhuggingface_estimator = HuggingFace(\n entry_point='run_squad.py',\n source_dir='./scripts',\n metric_definitions=metric_definitions,\n instance_type='ml.p3.8xlarge',\n instance_count=1,\n volume_size=200,\n role=role,\n transformers_version='4.6',\n pytorch_version='1.7',\n py_version='py36',\n hyperparameters=hyperparameters\n)", "_____no_output_____" ], [ "# starting the train job with our uploaded datasets as input\nhuggingface_estimator.fit({'train': input_train, 'validation': input_validation})\n\n# ml.p3.8xlarge, 2 epochでの実行時間の目安\n# Training seconds: 758\n# Billable seconds: 758", "_____no_output_____" ] ], [ [ "## Estimator Parameters", "_____no_output_____" ] ], [ [ "# container image used for training job\nprint(f\"container image used for training job: \\n{huggingface_estimator.image_uri}\\n\")\n\n# s3 uri where the trained model is located\nprint(f\"s3 uri where the trained model is located: \\n{huggingface_estimator.model_data}\\n\")\n\n# latest training job name for this estimator\nprint(f\"latest training job name for this estimator: \\n{huggingface_estimator.latest_training_job.name}\\n\")", "_____no_output_____" ], [ "# access the logs of the training job\nhuggingface_estimator.sagemaker_session.logs_for_job(huggingface_estimator.latest_training_job.name)", "_____no_output_____" ] ], [ [ "## Download-fine-tuned-model-from-s3", "_____no_output_____" ] ], [ [ "import 
os\n\nOUTPUT_DIR = './output/'\nif not os.path.exists(OUTPUT_DIR):\n os.makedirs(OUTPUT_DIR)", "_____no_output_____" ], [ "from sagemaker.s3 import S3Downloader\n\n# 学習したモデルのダウンロード\nS3Downloader.download(\n s3_uri=huggingface_estimator.model_data, # s3 uri where the trained model is located\n local_path='.', # local path where *.targ.gz is saved\n sagemaker_session=sess # sagemaker session used for training the model\n)", "_____no_output_____" ], [ "# OUTPUT_DIRに解凍します\n\n!tar -zxvf model.tar.gz -C output", "_____no_output_____" ] ], [ [ "## Question Answering on Local", "_____no_output_____" ] ], [ [ "from transformers import AutoTokenizer, AutoModelForQuestionAnswering\nimport torch", "_____no_output_____" ], [ "model = AutoModelForQuestionAnswering.from_pretrained('./output') \ntokenizer = AutoTokenizer.from_pretrained('cl-tohoku/bert-base-japanese-whole-word-masking') ", "_____no_output_____" ] ], [ [ "以下のセルは`./DDQA-1.0/RC-QA/DDQA-1.0_RC-QA_dev.json`からコピーしたものです", "_____no_output_____" ] ], [ [ "context = '実は先週、CBR600RRで事故りました。たまにはCBRにも乗らなきゃなーと思い久々にCBRで出勤したところ、家から10分ほど走ったところにある片側一車線の交差点で対向右折車と衝突してしまいました。自分が直進青信号で交差点へ進入したところで対向右折車線の車が突然右折を開始。とっさに急ブレーキはかけましたが、止まることはできずに右折車に衝突、自分は空中で一回転して左斜め数メートル先の路上へと飛ばされました。'\nquestion='何に乗っていて事故りましたか?'", "_____no_output_____" ], [ "#context = 'まぁ,何回か改正してるわけで,自転車を走らせる領域を変更しないって言うのは,怠慢っていうか責任逃れっていうか,道交法に携わってるヤツはみんな馬鹿なのか.大体の人はここまで極端な意見ではないだろうけど,自転車は歩道を走るほうが自然だとは考えているだろう.というのも, みんな自転車乗ってる時歩道を走るでしょ?自転車で歩道走ってても歩行者にそこまで危険な目に合わせないと考えているし,車道に出たら明らかに危険な目に合うと考えている.'\n#question='大体の人は自転車はどこを走るのが自然だと思っている?'", "_____no_output_____" ], [ "#context = '幸いけが人が出なくて良かったものの、タイヤの脱落事故が後を絶たない。先日も高速道路でトラックのタイヤがはずれ、中央分離帯を越え、反対車線を通行していた観光バスに直撃した。不幸にもバスを運転していた運転手さんがお亡くなりになった。もし、僕がこんな場面に遭遇していたら、この運転手さんのように、乗客の安全を考えて冷静に止まっただろうか?'\n#question = '後を絶たないのは何ですか?'", "_____no_output_____" ], [ "#context = 
'右折待ちの一般ドライバーの方は、直進車線からの右折タクシーに驚いて右折のタイミングを失ってしまい、更なる混雑を招いているようでした」と述べていました。2004年8月6日付けには、ある女性が「道を譲っても挨拶をしない人が多い。特に女性の方。そのため意地悪ですが対向車のドライバーが女性だと譲りません。私はまだ人間が出来ていないので受け流すことが出来ません」ということを言っていましたが、その気持ち良く分かります。私は横断歩道の歩行者に対しては特別真面目で、歩行者がいるかどうかを常に注意して、いるときは必ず止まるよう心掛けています。それでも気付かずに止まることができなかったときは、「ああ、悪いことしちゃったな…」と、バックミラーを見ながら思います。'\n#question = '歩行者がいるかどうかを常に注意しているのは誰ですか?'", "_____no_output_____" ], [ "# 推論\ninputs = tokenizer.encode_plus(question, context, add_special_tokens=True, return_tensors=\"pt\")\ninput_ids = inputs[\"input_ids\"].tolist()[0]\noutput = model(**inputs)\nanswer_start = torch.argmax(output.start_logits) \nanswer_end = torch.argmax(output.end_logits) + 1 \nanswer = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(input_ids[answer_start:answer_end]))\n\n# 結果\nprint(\"質問: \"+question)\nprint(\"回答: \"+answer)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
4aa47af11a2b695a7f40a89ec28f4670845911ec
152,336
ipynb
Jupyter Notebook
docs/html/example.ipynb
tugraz-sww/intensity_duration_frequency_analysis
ba03a82ad214658b9e9a1b4410eace704fa55c7b
[ "MIT" ]
null
null
null
docs/html/example.ipynb
tugraz-sww/intensity_duration_frequency_analysis
ba03a82ad214658b9e9a1b4410eace704fa55c7b
[ "MIT" ]
null
null
null
docs/html/example.ipynb
tugraz-sww/intensity_duration_frequency_analysis
ba03a82ad214658b9e9a1b4410eace704fa55c7b
[ "MIT" ]
1
2019-02-12T10:23:31.000Z
2019-02-12T10:23:31.000Z
79.341667
92,560
0.737272
[ [ [ "from idf_analysis.idf_class import IntensityDurationFrequencyAnalyse\nfrom idf_analysis.definitions import *\nimport pandas as pd\n%matplotlib inline", "_____no_output_____" ] ], [ [ "# Intensity Duration Frequency Analyse", "_____no_output_____" ], [ "## Parameter\n\n**series_kind**:\n\n`PARTIAL` = Partielle Serie (partial duration series, PDS) (peak over threshold, POT)\n\n`ANNUAL` = Jährliche Serie (annual maximum series, AMS)\n\n**worksheet**:\n\n`DWA`:\n- DWA-A 531\n- KOSTRA - empfohlen\n- Stützstellen: 60min und 12h\n\n`DWA-adv`:\n- DWA-A 531\n- Unterscheidung in überwiegend konvektiv und advektiv verursachte Starkregen\n- Stützstellen: 3h und 24h\n\n`ATV`:\n- ATV-A 121\n- Stützstellen: 3h und 48h\n\n**output_path** = Pfad zum Speichern\n\n**extended_durations** = Inkludiert die Dauerstufen `[720, 1080, 1440, 2880, 4320, 5760, 7200, 8640]` in der Analyse\n\n", "_____no_output_____" ] ], [ [ "out = 'example'\nname = 'EXAMPLE'\nidf = IntensityDurationFrequencyAnalyse(series_kind=PARTIAL, worksheet=DWA, output_path=out,\n extended_durations=True, output_filename=name,\n auto_save=True, unix=True)", "_____no_output_____" ] ], [ [ "Es wird nun ein Ordner `<name>_data` erstellt in `<out>`", "_____no_output_____" ] ], [ [ "data = pd.read_parquet('example/expample_rain_data.parquet')", "_____no_output_____" ], [ "data.head()", "_____no_output_____" ], [ "data.tail()", "_____no_output_____" ], [ "idf.set_series(data['precipitation'])", "_____no_output_____" ] ], [ [ "Bei jeder neuen Berechnung werden Zwischenergebnisse erstellt, welche nur abhängig von der gewählten Serie `series_kind`sind. 
Dieser Vorgang dauert einige Sekunden.\n\nAbgerufen können diese Zwischenergebnisse mit:\n\n(Dies Operation geschieht im Hintergrund und muss nicht explizit durchgeführt werden.)", "_____no_output_____" ] ], [ [ "idf.interim_results", "_____no_output_____" ] ], [ [ "Ist `auto_save=True` werden die Zwischenergebnisse je Serie (abhängig von `output_path` und `output_filename`) in die csv-Datei `<name>_interim_results.csv` geschrieben.\nDies wird empfohlen da jeder neue Aufruf etwas Zeit beansprucht und dadurch die Bereichnung verkürzt wird.", "_____no_output_____" ] ], [ [ "! tree example/EXAMPLE_data", "\u001b[01;34mexample/EXAMPLE_data\u001b[00m\r\n└── EXAMPLE_interim_results.csv\r\n\r\n0 directories, 1 file\r\n" ], [ "pd.read_csv(idf.output_filename + '_interim_results.csv', index_col=0)", "_____no_output_____" ] ], [ [ "Aus diesen Zwischenergebnissen werden in weiterer Folge die Parameter zur Berechnung der Regenhöhe und Regenspende ermittelt.\n\nHier sind bereist die Berechnungsverfahren und Stückpunkte laut dem gewählten `worksheet` berücksichtigt.\n\n(Dies Operation geschieht ebenfalls im Hintergrund und muss nicht explizit durchgeführt werden.)", "_____no_output_____" ] ], [ [ "idf.parameter", "_____no_output_____" ] ], [ [ "## Berechnungen", "_____no_output_____" ] ], [ [ "from IPython.display import Latex\ndef print_latex(string):\n Latex('$' + string.replace(' ', '\\;')+ '$')", "_____no_output_____" ], [ "idf.depth_of_rainfall(duration=15, return_period=1)", "_____no_output_____" ], [ "idf.print_depth_of_rainfall(duration=15, return_period=1)", "Resultierende Regenhöhe h_N(T_n=1.0a, D=15.0min) = 9.64 mm\n" ], [ "idf.rain_flow_rate(duration=15, return_period=1)", "_____no_output_____" ], [ "idf.print_rain_flow_rate(duration=15, return_period=1)", "Resultierende Regenspende r_N(T_n=1.0a, D=15.0min) = 107.06 L/(s*ha)\n" ], [ "idf.r_720_1()", "_____no_output_____" ], [ "idf.get_return_period(height_of_rainfall=10, duration=15)", "_____no_output_____" ], [ 
"idf.get_duration(height_of_rainfall=10, return_period=1)", "_____no_output_____" ], [ "idf.result_table()", "_____no_output_____" ], [ "idf.result_table(add_names=True)", "_____no_output_____" ] ], [ [ "To save the table as a csv:", "_____no_output_____" ] ], [ [ "idf.write_table()", " 0.5 1.0 2.0 3.0 5.0 10.0 15.0 50.0 100.0\n5 2.9 4.5 6.2 7.1 8.3 10.0 10.9 13.8 15.4\n10 5.3 7.5 9.7 11.0 12.7 14.9 16.2 20.0 22.2\n15 7.0 9.6 12.3 13.8 15.8 18.4 19.9 24.5 27.2\n20 8.2 11.2 14.2 16.0 18.2 21.1 22.9 28.1 31.1\n30 9.8 13.4 17.0 19.1 21.7 25.3 27.3 33.5 37.1\n45 11.2 15.4 19.7 22.2 25.3 29.6 32.1 39.4 43.7\n60 11.9 16.7 21.5 24.3 27.9 32.7 35.5 43.9 48.7\n90 13.6 18.6 23.7 26.6 30.3 35.3 38.3 47.0 52.0\n180 17.2 22.6 27.9 31.1 35.1 40.5 43.6 53.0 58.4\n270 19.6 25.2 30.8 34.1 38.3 43.9 47.2 56.9 62.5\n360 21.5 27.3 33.1 36.4 40.7 46.5 49.9 59.9 65.7\n450 23.1 29.0 34.9 38.4 42.7 48.7 52.1 62.4 68.3\n600 25.3 31.4 37.5 41.0 45.5 51.6 55.2 65.8 71.9\n720 26.8 33.0 39.2 42.8 47.4 53.6 57.2 68.0 74.2\n720 26.8 33.0 39.2 42.8 47.4 53.6 57.2 68.0 74.2\n1080 30.2 36.6 43.0 46.8 51.6 58.0 61.8 72.9 79.4\n1440 32.1 39.1 46.1 50.2 55.4 62.4 66.5 78.7 85.8\n2880 36.2 46.6 57.1 63.2 70.8 81.3 87.4 105.5 115.9\n4320 39.2 51.2 63.2 70.2 79.1 91.1 98.1 118.9 130.9\n5760 40.7 53.6 66.5 74.1 83.7 96.6 104.2 126.7 139.6\n7200 42.0 55.5 69.1 77.0 87.0 100.5 108.4 131.9 145.5\n8640 43.5 57.8 72.0 80.4 90.9 105.1 113.5 138.3 152.5\n" ], [ "! 
tree example/EXAMPLE_data", "\u001b[01;34mexample/EXAMPLE_data\u001b[00m\r\n├── EXAMPLE_interim_results.csv\r\n└── EXAMPLE_results_h_N.csv\r\n\r\n0 directories, 2 files\r\n" ], [ "fig = idf.result_figure(color=True)", "_____no_output_____" ] ], [ [ "----------------------------------------------------------------------------------", "_____no_output_____" ], [ "*Dieser Block funktioniert nur in einen laufenden Jupyter Notebook!*", "_____no_output_____" ] ], [ [ "from ipywidgets import interact, fixed, interact_manual\nimport ipywidgets as widgets", "_____no_output_____" ], [ "def f(min_duration = 5, max_duration=720, color=True, logx=False):\n fig = idf.result_figure(return_periods=[1,2,5,10,50], \n min_duration=min_duration, \n max_duration=max_duration, \n color=color, \n logx=logx)", "_____no_output_____" ], [ "interact(f, min_duration = (0,60,5), max_duration=(60,8640, 60), color=True, logx=False)", "_____no_output_____" ] ], [ [ "----------------------------------------------------------------------------------", "_____no_output_____" ], [ "To save the plot as a png use:", "_____no_output_____" ] ], [ [ "idf.result_plot()", "_____no_output_____" ], [ "! tree example/EXAMPLE_data", "\u001b[01;34mexample/EXAMPLE_data\u001b[00m\r\n├── \u001b[01;35mEXAMPLE_idf_plot.png\u001b[00m\r\n├── EXAMPLE_interim_results.csv\r\n└── EXAMPLE_results_h_N.csv\r\n\r\n0 directories, 3 files\r\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ] ]
4aa47b526c72901a0cfd70e791f00a79d75b6abf
13,979
ipynb
Jupyter Notebook
pdf_reader_exemplo.ipynb
andrade-lcs/pdf_reader
1992afb20dd9d2a39df9d8c183d2de7277012f83
[ "MIT" ]
1
2021-02-26T17:26:25.000Z
2021-02-26T17:26:25.000Z
pdf_reader_exemplo.ipynb
andrade-lcs/pdf_reader
1992afb20dd9d2a39df9d8c183d2de7277012f83
[ "MIT" ]
null
null
null
pdf_reader_exemplo.ipynb
andrade-lcs/pdf_reader
1992afb20dd9d2a39df9d8c183d2de7277012f83
[ "MIT" ]
null
null
null
31.988558
281
0.401817
[ [ [ "Instalação da biblioteca", "_____no_output_____" ] ], [ [ "!pip install pdfplumber", "Requirement already satisfied: pdfplumber in /usr/local/lib/python3.7/dist-packages (0.5.28)\nRequirement already satisfied: Wand in /usr/local/lib/python3.7/dist-packages (from pdfplumber) (0.6.6)\nRequirement already satisfied: Pillow>=7.0.0 in /usr/local/lib/python3.7/dist-packages (from pdfplumber) (7.1.2)\nRequirement already satisfied: pdfminer.six==20200517 in /usr/local/lib/python3.7/dist-packages (from pdfplumber) (20200517)\nRequirement already satisfied: pycryptodome in /usr/local/lib/python3.7/dist-packages (from pdfminer.six==20200517->pdfplumber) (3.10.1)\nRequirement already satisfied: chardet; python_version > \"3.0\" in /usr/local/lib/python3.7/dist-packages (from pdfminer.six==20200517->pdfplumber) (3.0.4)\nRequirement already satisfied: sortedcontainers in /usr/local/lib/python3.7/dist-packages (from pdfminer.six==20200517->pdfplumber) (2.4.0)\n" ] ], [ [ "Importação das bibliotecas utilizadas", "_____no_output_____" ] ], [ [ "import re\nimport pdfplumber\nimport pandas as pd\nfrom collections import namedtuple", "_____no_output_____" ] ], [ [ "Determinação do cabeçalho da tabela que será lida", "_____no_output_____" ] ], [ [ "Line = namedtuple('line', 'UA BR UnidadeFederal Km1 Inicial Km2 Final EstruturaHDM4 TipodeSuperfície Clima1 clima2 clima3 Intervalode Deflexao') #'títulos das colunas que o scrip irá reconhecer no pdf com mesma formatação e separados por espaço, ex.: 'Nome Idade Cidade'", "_____no_output_____" ] ], [ [ "Determinação do formato de dado a ser lido", "_____no_output_____" ] ], [ [ "line_re = re.compile(r'\\+') #utilizando regular expression para configurar a linha que o scrip irá ler, ex.: Se é numérico: [0-9]", "_____no_output_____" ] ], [ [ "Importação do arquivo a ser lido", "_____no_output_____" ] ], [ [ "file = 'Relatório Final.pdf' #nome do arquivo com formato, se o arquivo estiver fora da pasta do projeto deverá se colocar o 
nome com caminho do arquivo ex.: 'C:\\Usuario\\Desktop\\arquivo.pdf'", "_____no_output_____" ] ], [ [ "Leitura do arquivo pdf", "_____no_output_____" ] ], [ [ "lines_df = [] #lista que irá receber os dados lidos\nc = 0 #contador\n\nwith pdfplumber.open(file) as pdf: #comando para biblioteca ler o arquivo na variável 'file'\n for page in pdf.pages[116:118]: #comando para a biblioteca ler cada uma das páginas do arquivo\n text = page.extract_text() #comando para extrair o conteúdo da página\n c += 1 #incremento do contador para cada página lida\n for line in text.split('\\n'): #separação do conteúdo lido por linhas\n if line_re.search(line): #comando para identificação da linha configurada\n items = line.split() #separação do conteúdo por espaço\n while len(items) < 14:\n items.append('NaN')\n lines_df.append(Line(*items)) #comando que adiciona os items a lista de dados", "_____no_output_____" ] ], [ [ "Criação do dataframe", "_____no_output_____" ] ], [ [ "df = pd.DataFrame(lines_df)", "_____no_output_____" ] ], [ [ "Visualização dos dados lidos do arquivo pdf", "_____no_output_____" ] ], [ [ "df.describe()", "_____no_output_____" ], [ "df.info()", "<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 51 entries, 0 to 50\nData columns (total 14 columns):\n # Column Non-Null Count Dtype \n--- ------ -------------- ----- \n 0 UA 51 non-null object\n 1 BR 51 non-null object\n 2 UnidadeFederal 51 non-null object\n 3 Km1 51 non-null object\n 4 Inicial 51 non-null object\n 5 Km2 51 non-null object\n 6 Final 51 non-null object\n 7 EstruturaHDM4 51 non-null object\n 8 TipodeSuperfície 51 non-null object\n 9 Clima1 51 non-null object\n 10 clima2 51 non-null object\n 11 clima3 51 non-null object\n 12 Intervalode 51 non-null object\n 13 Deflexao 51 non-null object\ndtypes: object(14)\nmemory usage: 5.7+ KB\n" ] ], [ [ "Tratamento dos dados", "_____no_output_____" ] ], [ [ "#aqui pode ser feito vários exercícios de tratamento e correção de dados lidos", "_____no_output_____" ] ], 
[ [ "Salvar no formato csv os dados lidos para ser acessado posteriormente de forma acessível ao pandas", "_____no_output_____" ] ], [ [ "df.to_csv('dados.csv', sep=';', index=False, line_terminator='\\r\\n',encoding='utf-8') #inserir o nome do arquivo a ser criado", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa48dfffcf89cc53203fee2c696e1323a187086
127,986
ipynb
Jupyter Notebook
FCI/module-analysis-files/computeProportions.ipynb
62442katieb/PhysicsLearning
88625c9d26550e5e6f3b3cbb6657b2164f5a9976
[ "Apache-2.0" ]
null
null
null
FCI/module-analysis-files/computeProportions.ipynb
62442katieb/PhysicsLearning
88625c9d26550e5e6f3b3cbb6657b2164f5a9976
[ "Apache-2.0" ]
null
null
null
FCI/module-analysis-files/computeProportions.ipynb
62442katieb/PhysicsLearning
88625c9d26550e5e6f3b3cbb6657b2164f5a9976
[ "Apache-2.0" ]
null
null
null
184.417867
55,788
0.847663
[ [ [ "# Compute overlap proportion between: \n# 1) clustering of individuals based on similar FCI responses\n# 2) FCI question clusters results and their behavioral interpretations\n# Author: Jessica Bartley\n# Last edited: 11/6/17\n\n%matplotlib inline\n\n# libraries\nimport numpy as np\nfrom __future__ import division\nimport matplotlib.pyplot as plt\nimport pandas as pd\nfrom math import pi\n#import seaborn as sns", "_____no_output_____" ], [ "# read in files\nf_idKey = \"idKey.csv\" #Key from study PIDs to IDs Eric used in R code (lets call those rIDs)\nf_fciQcomm = \"fci_charac.csv\" #List of question (lets shorten to Q) community membership for all FCI answer choices.\nf_fciPcomm = \"communities.csv\" #List of rIDs making up each person (lets shorten to P) cluster\nf_fciresp = \"responses.csv\" #Each subject's response for the 9 in-scanner FCI questions.", "_____no_output_____" ], [ "\"\"\"\nGlobal definitions\n\"\"\"\n\n# Number of P clusters observed\nnclusters = 13\n\n# all possible in-scanner FCI Q responses (named via Eric's Q coding scheme)\nQs = ['X2a', 'X2b', 'X2c', 'X2d', 'X3a', 'X3b', 'X3c', \\\n 'X3e', 'X6a', 'X6b', 'X6c', 'X6e', 'X7a', 'X7b', \\\n 'X7c', 'X7e', 'X8a', 'X8b', 'X8d', 'X8e', 'X12b', \\\n 'X12c', 'X12d', 'X12e', 'X14a', 'X14b', 'X14c', \\\n 'X14d', 'X27a', 'X27b', 'X27c', 'X27d', 'X29a', \\\n 'X29b', 'X29d', 'X29e']\n\n# Incorrect FCI answer choices not included in Eric's origional Q community analysis (due to infrequent responses). \nmissingQs = ['X6e', 'X12e', 'X29a', 'X29e']\n\n# Eric's names for each FCI Q community membership. 
Cluster '10' has the correct answer choices and cluster 'NA' has the above missingQs\nallmems = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', 'NA']\n\n# Number of subjects observed for each P cluster, ordered sequentially from cluster 1 to cluster 13\nclustersizes = [24, 17, 10, 10, 8, 7, 7, 5, 5, 5, 4, 4, 2]\n\n# All P clusters by name\nclusterlist = ('cluster1','cluster2','cluster3','cluster4','cluster5',\\\n 'cluster6','cluster7','cluster8','cluster9','cluster10',\\\n 'cluster11','cluster12','cluster13')", "_____no_output_____" ], [ "def dequote(a_string):\n \"\"\"\n If a string has single or double quotes around it, remove them.\n Make sure the pair of quotes match.\n If a matching pair of quotes is not found, return the string unchanged.\n \"\"\"\n if (a_string[0] == a_string[-1]) and a_string.startswith((\"'\", '\"')):\n return a_string[1:-1]\n return a_string", "_____no_output_____" ], [ "def listToFloat(a_list):\n \"\"\"\n If all elements in a list are numbers being interpreted as strings then turn them into floats.\n \"\"\"\n a = []\n for i in range(0,len(a_list)):\n a.append(int(dequote(a_list[i])))\n a_list = a\n return a_list", "_____no_output_____" ], [ "def dropExtraQs(allQs, scannerQs):\n \"\"\"\n Remove all quesiton answer choices that were not shown in the scanner.\n \"\"\"\n qlist = []\n for item in allQs:\n if item[0] in scannerQs:\n qlist.append(item)\n return qlist", "_____no_output_____" ], [ "def computeProportion(a_list):\n \"\"\"\n Creates dictionay where \n keys = Q community membership \n value = proportion of Q responses falling within any given Q community.\n e.g. 
for Q2 in cluster 1: {'1':0.75, '2': 0.2, '3': 0 ...}\n \"\"\"\n proportion = {}\n for item in a_list:\n key = str(item)\n if key in proportion.keys():\n proportion[key] += 1\n else:\n proportion[key] = 1\n total = sum(proportion.itervalues(), 0.0)\n proportion = {item: value / total for item, value in proportion.iteritems()} \n for mem in allmems:\n if mem not in proportion.keys():\n proportion.update({mem: 0})\n return proportion", "_____no_output_____" ], [ "# convert files to readable format\nlist_idKey = np.genfromtxt(f_idKey, names=True, delimiter=',', dtype=None)\nlist_pComm = np.genfromtxt(f_fciPcomm, names=True, delimiter=',', dtype=None)\nlist_Qcomm = np.genfromtxt(f_fciQcomm, names=True, delimiter=',', dtype=None)\nlist_Qcomm = dropExtraQs(list_Qcomm, Qs) # keep only Q responses shown within the scanning session\nlist_fciresp = np.genfromtxt(f_fciresp, names=True, delimiter=',', dtype=None)\n\n\n# make the above lists into separate strings\ncommunities = zip(*list_pComm)[0] # P clusters string {\"1.1\", \"1.2\"...}\nsubjects = zip(*list_pComm)[1] # rIDs ordered by appearence in P clusters\nrIDs = zip(*list_idKey)[0]\nrIDs = listToFloat(rIDs) # rIDs ordered numerically\nIDs = zip(*list_idKey)[1] # Study IDs\nQIDs = zip(*list_Qcomm)[0] # Eric's Q response IDs\nquestionlist = list(list_fciresp.dtype.names) # Qs asked within scanner\nquestionlist.pop(0) # remove empty first element from list\n\n\n# generate dictionaries\ncommKey = {} # rID : P cluster\nfor counter, communitymem in enumerate(communities):\n commKey.update({subjects[counter]: communitymem})\n\nidKey_pid2rid = {} # study ID : rID\nfor counter, rID in enumerate(rIDs):\n idKey_pid2rid.update({IDs[counter]: rID})\n\nidKey_rid2pid = {} # rID : study ID\nfor counter, rID in enumerate(rIDs):\n idKey_rid2pid.update({rID: IDs[counter]})\n \nquestKey = {} # FCI Q response code : Q community membership\nfor counter, qid in enumerate(QIDs):\n questKey.update({qid: list_Qcomm[counter][1]})\nfor question in 
questionlist:\n x = \"{0}\".format(question.replace('Q','X'))\n questKey.update({x+'N':0})\nfor missingQ in missingQs:\n questKey.update({missingQ:'NA'})\n \npComm = {} # P cluster number : rIDs\nfor pair in list_pComm:\n key = str(pair[0].split('.')[0].replace('\\\"',''))\n value = pair[1]\n if key in pComm.keys():\n pComm[key].append(value)\n else:\n pComm[key] = []\n pComm[key].append(value)", "/home/data/nbc/data-analysis/env/lib/python2.7/site-packages/ipykernel/__main__.py:2: VisibleDeprecationWarning: Reading unicode strings without specifying the encoding argument is deprecated. Set the encoding, use None for the system default.\n from ipykernel import kernelapp as app\n/home/data/nbc/data-analysis/env/lib/python2.7/site-packages/ipykernel/__main__.py:3: VisibleDeprecationWarning: Reading unicode strings without specifying the encoding argument is deprecated. Set the encoding, use None for the system default.\n app.launch_new_instance()\n/home/data/nbc/data-analysis/env/lib/python2.7/site-packages/ipykernel/__main__.py:4: VisibleDeprecationWarning: Reading unicode strings without specifying the encoding argument is deprecated. Set the encoding, use None for the system default.\n/home/data/nbc/data-analysis/env/lib/python2.7/site-packages/ipykernel/__main__.py:6: VisibleDeprecationWarning: Reading unicode strings without specifying the encoding argument is deprecated. Set the encoding, use None for the system default.\n" ], [ "def respToQcode():\n \"\"\"\n Converts in-scanner responses (1=\"a\", 2=\"b\"...) to FCI response codes (\"X2a\", \"X2b\"...)\n e.g. 
takes in ('\"216\"', 3, 4, 3, 4, 2, 1, 2, 3, 2)\n and gives back ('\"216\"', 'X2c', 'X3e', 'X6b', 'X7e', 'X8b', 'X12b', 'X14b', 'X27c', 'X29b')\n \"\"\"\n # row is an integer in [0,len(list_fciresp)]\n # col is an integer in [0,len(list_fciresp.dtype.names)-1]\n names = list_fciresp.dtype.names\n list_fciresp_mod = []\n for participant in list_fciresp:\n sublist = []\n for name in names:\n if 'q' not in name.lower():\n sublist.append(participant[name])\n else:\n qnumber = int(name[1:])\n # Some origional FCI answer choices were not shown in the scanner\n # (the in-scanner FCI only showed 4 answer choices while the origional FCI has 5)\n # e.g. if an origional FCI answer coded as \"d\" was not shown, then the corresponding\n # in-scanner coded as \"d\" would actually map to origional FCI answer choice \"e\".\n # Below is a re-odering of those in-scanner answer choices to map them to the\n # correct origional FCI answer choice.\n def NumberAnswerToLetterAnswer(x):\n if qnumber in [3,6,7]:\n if x == 1:\n return 'a'\n elif x == 2:\n return 'b'\n elif x == 3:\n return 'c'\n elif x == 4:\n return 'e'\n else:\n return 'N'\n elif qnumber in [8,29]:\n if x == 1:\n return 'a'\n elif x == 2:\n return 'b'\n elif x == 3:\n return 'd'\n elif x == 4:\n return 'e'\n else:\n return 'N'\n elif qnumber in [12]:\n if x == 1:\n return 'b'\n elif x == 2:\n return 'c'\n elif x == 3:\n return 'd'\n elif x == 4:\n return 'e'\n else:\n return 'N'\n else:\n if x == 1:\n return 'a'\n elif x == 2:\n return 'b'\n elif x == 3:\n return 'c'\n elif x == 4:\n return 'd'\n else:\n return 'N'\n x = \"{0}{1}\".format(name.replace('Q','X'),NumberAnswerToLetterAnswer(participant[name]))\n sublist.append(x)\n list_fciresp_mod.append(tuple(sublist)) \n return list_fciresp_mod", "_____no_output_____" ], [ "# The below gives each subject's index in the respToQcode() list of answer choices per subject ID\nidToClusterIndex = {} # Study ID : corresponding P cluster index\nperson_list = []\nfor i in 
range(0,len(respToQcode())):\n person_list.append(dequote(respToQcode()[i][0]))\nfor counter, person in enumerate(person_list):\n personindex = [i for i, row in enumerate(respToQcode()) if respToQcode()[counter][0] in row] #index of subject in respToQcode\n idToClusterIndex.update({respToQcode()[counter][0]: personindex})", "_____no_output_____" ], [ "\"\"\"\nCompute the proportion of Q community membership represented within each P cluster\n\"\"\"\n\n# Step 1: compute proportion of Q community membership for each Q within a P cluster \n\npersonlist = []\nfor i in range(0,len(respToQcode())):\n person_list.append(dequote(respToQcode()[i][0]))\n\n# Create nested dictionary with: \n# dict structure: allMemberships['P cluster']['Question']['Q membership'] = Proportion\n# e.g. allMemberships['11']['Q2']['1'] = 0.0046\n# key = P cluster name\n# value = dictionary w/ \n# key = Q# \n# value = dictionary w/ \n# key = Q community membership name \n# value = proportion of Q community membership\nallMemberships = {}\naccuracy = {} # P cluster : average accuracy assoicated with P cluster\nfor cluster in pComm.keys(): #loop through P clusters\n clustMembership = {}\n for qcounter, question in enumerate(questionlist): #loop through questions in P cluster\n FCImembership_list = []\n for idcounter, id in enumerate(pComm[cluster]): #loop through subject responses to question\n person = '\"' + str(idKey_rid2pid[id]) + '\"'\n num = str(idKey_rid2pid[id])\n numAsString = '\"' + num + '\"'\n index = idToClusterIndex[numAsString][0]\n #map Q response code to Q community membership\n FCImembership_list.append(questKey[respToQcode()[index][qcounter+1]])\n Qmembership = computeProportion(FCImembership_list)\n clustMembership.update({question: Qmembership})\n #get average accuracy in P cluster to use later\n acc_list = []\n for question in questionlist:\n acc_list.append(clustMembership[question]['10'])\n avg_acc = reduce(lambda x, y: x + y, acc_list) / len(acc_list)\n 
accuracy.update({cluster: avg_acc})\n allMemberships.update({cluster: clustMembership})\nprint accuracy\n\n# Step 2: compute proportion of Q community membership per cluster (average across Q's within a P cluster)\n\n# Create nested dictionary with:\n# dict structure: clusterdict['P cluster']['Q membership'] = Average proportion\n# e.g. clusterdict['2']['10'] = 0.732\n# key = P cluster name\n# value = dictionary w/ \n# key = Q community membership name \n# value = Average proportion across cluster\nclusterdict = {}\nfor cluster in pComm.keys(): #loop through P clusters\n d = {}\n for mem in allmems: #loop through Q community memberships\n #get Q community membership proportion across questions\n proportion_list = []\n for question in questionlist: #loop through questions in P cluster\n proportion_list.append(allMemberships[cluster][question][mem]) #gives [propQ1,propQ2,...] for each mem in allmems\n #compute average proportion of Q community membership for P cluster\n d.update({mem: reduce(lambda x, y: x + y, proportion_list) / len(proportion_list)})\n clusterdict.update({cluster: d})\n\n \n# Step 3: scale Q community membership proportions by P cluster size for visualization\n\nscaledProportions = {}\nfor cluster in pComm.keys(): #loop through P clusters\n scaledNestedDict = {}\n keys = clusterdict[cluster].keys() #Q community memberships\n values = clusterdict[cluster].values() #average (unscaled) proportions\n scaledValues = []\n #scale average Q community membership proportions by cluster size\n for value in values:\n scaledValues.append(value*(len(pComm[cluster])/sum(clustersizes)))\n for index, key in enumerate(keys):\n scaledNestedDict.update({key: scaledValues[index]})\n scaledProportions.update({cluster: scaledNestedDict})", "{'11': 0.4722222222222222, '10': 0.4222222222222223, '13': 0.1111111111111111, '12': 0.4166666666666667, '1': 0.7777777777777777, '3': 0.5333333333333333, '2': 0.7320261437908495, '5': 0.7777777777777778, '4': 0.6555555555555556, '7': 
0.4285714285714286, '6': 0.5714285714285714, '9': 0.2888888888888889, '8': 0.5111111111111111}\n" ], [ "\"\"\"\nCreate Facited radar plots\n\"\"\"\n\n# Set data\ndf = pd.DataFrame({\n'group': ['1','2','3','4','5','6','7','8','9','10','11','12','13'],\n\n# The below are scaled by cluster size. Probably the best way to do it.\n'm1': [scaledProportions['1']['1'],scaledProportions['2']['1'],scaledProportions['3']['1'],scaledProportions['4']['1'],scaledProportions['5']['1'],scaledProportions['6']['1'],scaledProportions['7']['1'],scaledProportions['8']['1'],scaledProportions['9']['1'],scaledProportions['10']['1'],scaledProportions['11']['1'],scaledProportions['12']['1'],scaledProportions['13']['1']],\n'm2': [scaledProportions['1']['2'],scaledProportions['2']['2'],scaledProportions['3']['2'],scaledProportions['4']['2'],scaledProportions['5']['2'],scaledProportions['6']['2'],scaledProportions['7']['2'],scaledProportions['8']['2'],scaledProportions['9']['2'],scaledProportions['10']['2'],scaledProportions['11']['2'],scaledProportions['12']['2'],scaledProportions['13']['2']],\n'm3': [scaledProportions['1']['3'],scaledProportions['2']['3'],scaledProportions['3']['3'],scaledProportions['4']['3'],scaledProportions['5']['3'],scaledProportions['6']['3'],scaledProportions['7']['3'],scaledProportions['8']['3'],scaledProportions['9']['3'],scaledProportions['10']['3'],scaledProportions['11']['3'],scaledProportions['12']['3'],scaledProportions['13']['3']],\n'm4': [scaledProportions['1']['4'],scaledProportions['2']['4'],scaledProportions['3']['4'],scaledProportions['4']['4'],scaledProportions['5']['4'],scaledProportions['6']['4'],scaledProportions['7']['4'],scaledProportions['8']['4'],scaledProportions['9']['4'],scaledProportions['10']['4'],scaledProportions['11']['4'],scaledProportions['12']['4'],scaledProportions['13']['4']],\n'm5': 
[scaledProportions['1']['5'],scaledProportions['2']['5'],scaledProportions['3']['5'],scaledProportions['4']['5'],scaledProportions['5']['5'],scaledProportions['6']['5'],scaledProportions['7']['5'],scaledProportions['8']['5'],scaledProportions['9']['5'],scaledProportions['10']['5'],scaledProportions['11']['5'],scaledProportions['12']['5'],scaledProportions['13']['5']],\n'm6': [scaledProportions['1']['6'],scaledProportions['2']['6'],scaledProportions['3']['6'],scaledProportions['4']['6'],scaledProportions['5']['6'],scaledProportions['6']['6'],scaledProportions['7']['6'],scaledProportions['8']['6'],scaledProportions['9']['6'],scaledProportions['10']['6'],scaledProportions['11']['6'],scaledProportions['12']['6'],scaledProportions['13']['6']],\n'm7': [scaledProportions['1']['7'],scaledProportions['2']['7'],scaledProportions['3']['7'],scaledProportions['4']['7'],scaledProportions['5']['7'],scaledProportions['6']['7'],scaledProportions['7']['7'],scaledProportions['8']['7'],scaledProportions['9']['7'],scaledProportions['10']['7'],scaledProportions['11']['7'],scaledProportions['12']['7'],scaledProportions['13']['7']],\n'm8': [scaledProportions['1']['8'],scaledProportions['2']['8'],scaledProportions['3']['8'],scaledProportions['4']['8'],scaledProportions['5']['8'],scaledProportions['6']['8'],scaledProportions['7']['8'],scaledProportions['8']['8'],scaledProportions['9']['8'],scaledProportions['10']['8'],scaledProportions['11']['8'],scaledProportions['12']['8'],scaledProportions['13']['8']],\n'm9': [scaledProportions['1']['9'],scaledProportions['2']['9'],scaledProportions['3']['9'],scaledProportions['4']['9'],scaledProportions['5']['9'],scaledProportions['6']['9'],scaledProportions['7']['9'],scaledProportions['8']['9'],scaledProportions['9']['9'],scaledProportions['10']['9'],scaledProportions['11']['9'],scaledProportions['12']['9'],scaledProportions['13']['9']],\n#'m10': 
[scaledProportions['1']['10'],scaledProportions['2']['10'],scaledProportions['3']['10'],scaledProportions['4']['10'],scaledProportions['5']['10'],scaledProportions['6']['10'],scaledProportions['7']['10'],scaledProportions['8']['10'],scaledProportions['9']['10'],scaledProportions['10']['10'],scaledProportions['11']['10'],scaledProportions['12']['10'],scaledProportions['13']['10']],\n#'mN': [scaledProportions['1']['NA'],scaledProportions['2']['NA'],scaledProportions['3']['NA'],scaledProportions['4']['NA'],scaledProportions['5']['NA'],scaledProportions['6']['NA'],scaledProportions['7']['NA'],scaledProportions['8']['NA'],scaledProportions['9']['NA'],scaledProportions['10']['NA'],scaledProportions['11']['NA'],scaledProportions['12']['NA'],scaledProportions['13']['NA']]\n})\n\n\ndef make_spider( row, title, color):\n\n # number of variable\n categories=list(df)[1:]\n N = len(categories)\n\n # What will be the angle of each axis in the plot? (we divide the plot / number of variable)\n angles = [n / float(N) * 2 * pi for n in range(N)]\n angles += angles[:1]\n\n # Initialise the spider plot\n ax = plt.subplot(4,4,row+1, polar=True, )\n\n # If you want the first axis to be on top:\n ax.set_theta_offset(pi / 2)\n ax.set_theta_direction(-1)\n\n # Draw one axis per variable + add labels\n plt.xticks(angles[:-1], categories, color='grey', size=7)\n \n # Extend margins\n plt.subplots_adjust(left=0.2, hspace=0.4)\n #plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)\n\n # Draw ylabels\n ax.set_rlabel_position(0)\n #values if I'm plotting the scaled values and including cluster 10 (correct answers):\n# plt.yticks([.5,.1,.15], [\".5\",\".1\",\".15\"], color=\"grey\", size=6)\n# plt.ylim(0,0.2)\n #values if I'm plotting the scaled values and not including cluster 10 (correct answers)\n plt.yticks([.01,.02], [\"\",\"\"], color=\"grey\", size=6)\n plt.ylim(0,0.03)\n\n # Ind1\n values=df.loc[row].drop('group').values.flatten().tolist()\n values += values[:1]\n 
ax.plot(angles, values, color=color, linewidth=2, linestyle='solid')\n ax.fill(angles, values, color=color, alpha=0.4)\n\n # Add a title\n ttl = plt.title(title, size=11, color=color, y=1.052)\n ttl.set_position([.5, 1.12])\n\n# ------- PART 2: Apply to all individuals\n# initialize the figure\nmy_dpi=250\nplt.figure(figsize=(3200/my_dpi, 3200/my_dpi), dpi=my_dpi)\n\n# Create a color palette:\n# good options: \"tab10\", \"Set2\", \"gist_stern\", \"tab20\", \"tab20b\", \"Paired\", \"Dark2\", \"Set1\"\nmy_palette = plt.cm.get_cmap(\"tab10\", len(df.index)) \n\n# Plot the three normative groups\nfor row in range(0, 3):\n make_spider( row=row, title='cluster '+df['group'][row]+' (n='+str(clustersizes[row])+')', color=my_palette(row+1))\n# plt.text(1, 0.037, r'accuracy {0}%'.format(acc), color=\"gray\", size=8.5)\n plt.text(-.815, 0.04651, r'{0}% correct'.format(int(accuracy[df['group'][row]]*100)), color=\"gray\", size=8)\n\nplt.show()\nplt.clf()", "_____no_output_____" ], [ "\"\"\"\nCreate singular radar plot\nResults are basically unreadable...\n\"\"\"\n\n# Set data\ndf = pd.DataFrame({\n'group': ['1 (n=27)','2 (n=18)','3 (n=10)','4 (n=10)','5 (n=8)',\\\n '6 (n=7)','7 (n=7)','8 (n=5)','9 (n=5)','10 (n=5)',\\\n '11 (n=4)','12 (n=4)','13 (n=2)'],\n\n# The below are scaled by cluster size\n'm1': [scaledProportions['1']['1'],scaledProportions['2']['1'],scaledProportions['3']['1'],scaledProportions['4']['1'],scaledProportions['5']['1'],scaledProportions['6']['1'],scaledProportions['7']['1'],scaledProportions['8']['1'],scaledProportions['9']['1'],scaledProportions['10']['1'],scaledProportions['11']['1'],scaledProportions['12']['1'],scaledProportions['13']['1']],\n'm2': 
[scaledProportions['1']['2'],scaledProportions['2']['2'],scaledProportions['3']['2'],scaledProportions['4']['2'],scaledProportions['5']['2'],scaledProportions['6']['2'],scaledProportions['7']['2'],scaledProportions['8']['2'],scaledProportions['9']['2'],scaledProportions['10']['2'],scaledProportions['11']['2'],scaledProportions['12']['2'],scaledProportions['13']['2']],\n'm3': [scaledProportions['1']['3'],scaledProportions['2']['3'],scaledProportions['3']['3'],scaledProportions['4']['3'],scaledProportions['5']['3'],scaledProportions['6']['3'],scaledProportions['7']['3'],scaledProportions['8']['3'],scaledProportions['9']['3'],scaledProportions['10']['3'],scaledProportions['11']['3'],scaledProportions['12']['3'],scaledProportions['13']['3']],\n'm4': [scaledProportions['1']['4'],scaledProportions['2']['4'],scaledProportions['3']['4'],scaledProportions['4']['4'],scaledProportions['5']['4'],scaledProportions['6']['4'],scaledProportions['7']['4'],scaledProportions['8']['4'],scaledProportions['9']['4'],scaledProportions['10']['4'],scaledProportions['11']['4'],scaledProportions['12']['4'],scaledProportions['13']['4']],\n'm5': [scaledProportions['1']['5'],scaledProportions['2']['5'],scaledProportions['3']['5'],scaledProportions['4']['5'],scaledProportions['5']['5'],scaledProportions['6']['5'],scaledProportions['7']['5'],scaledProportions['8']['5'],scaledProportions['9']['5'],scaledProportions['10']['5'],scaledProportions['11']['5'],scaledProportions['12']['5'],scaledProportions['13']['5']],\n'm6': [scaledProportions['1']['6'],scaledProportions['2']['6'],scaledProportions['3']['6'],scaledProportions['4']['6'],scaledProportions['5']['6'],scaledProportions['6']['6'],scaledProportions['7']['6'],scaledProportions['8']['6'],scaledProportions['9']['6'],scaledProportions['10']['6'],scaledProportions['11']['6'],scaledProportions['12']['6'],scaledProportions['13']['6']],\n'm7': 
[scaledProportions['1']['7'],scaledProportions['2']['7'],scaledProportions['3']['7'],scaledProportions['4']['7'],scaledProportions['5']['7'],scaledProportions['6']['7'],scaledProportions['7']['7'],scaledProportions['8']['7'],scaledProportions['9']['7'],scaledProportions['10']['7'],scaledProportions['11']['7'],scaledProportions['12']['7'],scaledProportions['13']['7']],\n'm8': [scaledProportions['1']['8'],scaledProportions['2']['8'],scaledProportions['3']['8'],scaledProportions['4']['8'],scaledProportions['5']['8'],scaledProportions['6']['8'],scaledProportions['7']['8'],scaledProportions['8']['8'],scaledProportions['9']['8'],scaledProportions['10']['8'],scaledProportions['11']['8'],scaledProportions['12']['8'],scaledProportions['13']['8']],\n'm9': [scaledProportions['1']['9'],scaledProportions['2']['9'],scaledProportions['3']['9'],scaledProportions['4']['9'],scaledProportions['5']['9'],scaledProportions['6']['9'],scaledProportions['7']['9'],scaledProportions['8']['9'],scaledProportions['9']['9'],scaledProportions['10']['9'],scaledProportions['11']['9'],scaledProportions['12']['9'],scaledProportions['13']['9']],\n#'m10': [scaledProportions['1']['10'],scaledProportions['2']['10'],scaledProportions['3']['10'],scaledProportions['4']['10'],scaledProportions['5']['10'],scaledProportions['6']['10'],scaledProportions['7']['10'],scaledProportions['8']['10'],scaledProportions['9']['10'],scaledProportions['10']['10'],scaledProportions['11']['10'],scaledProportions['12']['10'],scaledProportions['13']['10']],\n#'mN': [scaledProportions['1']['NA'],scaledProportions['2']['NA'],scaledProportions['3']['NA'],scaledProportions['4']['NA'],scaledProportions['5']['NA'],scaledProportions['6']['NA'],scaledProportions['7']['NA'],scaledProportions['8']['NA'],scaledProportions['9']['NA'],scaledProportions['10']['NA'],scaledProportions['11']['NA'],scaledProportions['12']['NA'],scaledProportions['13']['NA']]\n\n})\n\n# ------- PART 1: Create background\n \n# number of 
variable\ncategories=list(df)[1:]\nN = len(categories)\n \n# What will be the angle of each axis in the plot? (we divide the plot / number of variable)\nangles = [n / float(N) * 2 * pi for n in range(N)]\nangles += angles[:1]\n \n# Initialise the spider plot\nax = plt.subplot(111, polar=True)\n \n# If you want the first axis to be on top:\nax.set_theta_offset(pi / 2)\nax.set_theta_direction(-1)\n \n# Draw one axe per variable + add labels labels yet\nplt.xticks(angles[:-1], categories)\n \n# Draw ylabels\nax.set_rlabel_position(0)\n#good values if I'm including cluster 10 (all correct answers)\n#plt.yticks([.5,.1,.15], [\".5\",\".1\",\".15\"], color=\"grey\", size=6)\n#plt.ylim(0,0.2)\nplt.yticks([.01,.02], [\".01\",\".02\"], color=\"grey\", size=6)\nplt.ylim(0,0.03)\n\n\n# ------- PART 2: Add plots\n# Plot each individual = each line of the data\n\n# Ind1: Normaitve Group A\nvalues=df.loc[0].drop('group').values.flatten().tolist()\nvalues += values[:1]\nax.plot(angles, values, linewidth=1, linestyle='solid', label=\"cluster 1\")\nax.fill(angles, values, 'b', alpha=0.1)\n \n# Ind2: Normaitve Group B\nvalues=df.loc[1].drop('group').values.flatten().tolist()\nvalues += values[:1]\nax.plot(angles, values, linewidth=1, linestyle='solid', label=\"cluster 2\")\nax.fill(angles, values, 'r', alpha=0.1)\n\n# Ind3: Normative Group C\nvalues=df.loc[2].drop('group').values.flatten().tolist()\nvalues += values[:1]\nax.plot(angles, values, linewidth=1, linestyle='solid', label=\"cluster 3\")\nax.fill(angles, values, 'g', alpha=0.1)\n\n# Add legend\nplt.legend(loc='upper right', bbox_to_anchor=(1.65, .97))\n\nplt.show()\nplt.clf()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa48e4a049cc67686aae8e0c2368f97b39909d3
57,693
ipynb
Jupyter Notebook
IPL-DataScrape.ipynb
ABMalhotra/IPL-Bidding
048132139aa5d83deaef6bd87348ae5b6e3fcb32
[ "MIT" ]
null
null
null
IPL-DataScrape.ipynb
ABMalhotra/IPL-Bidding
048132139aa5d83deaef6bd87348ae5b6e3fcb32
[ "MIT" ]
null
null
null
IPL-DataScrape.ipynb
ABMalhotra/IPL-Bidding
048132139aa5d83deaef6bd87348ae5b6e3fcb32
[ "MIT" ]
null
null
null
38.38523
133
0.323419
[ [ [ "import bs4 as bs\nimport urllib\nimport pandas as pd", "_____no_output_____" ], [ "years = [2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019]\nresults={}", "_____no_output_____" ], [ "for year in years:\n source = urllib.request.urlopen('https://www.iplt20.com/stats/'+str(year)).read()\n soup = bs.BeautifulSoup(source,'html')\n \n # title of the page\n print(year,':',soup.title.string)\n \n # title of the page\n points_table = soup.find_all(class_=\"standings-table__optional\")\n data_dump=[]\n for entry in points_table:\n data_dump.append(entry.text)\n data_dump=data_dump[6:]\n \n # title of the page\n results_data=[]\n idx=0\n while idx<len(data_dump):\n data = [data_dump[idx],data_dump[idx+1],data_dump[idx+2],data_dump[idx+3]]\n data = list(map(int,data))\n results_data.append(data)\n idx+=8\n \n year_results={}\n teams = soup.find_all(class_=\"standings-table__team-name js-team\")\n for standing,team in enumerate(teams):\n year_results[team.text]=results_data[standing],standing+1\n \n results[year]=year_results", "2008 : IPLT20.com - Indian Premier League Official Website - Stats\n2009 : IPLT20.com - Indian Premier League Official Website - Stats\n2010 : IPLT20.com - Indian Premier League Official Website - Stats\n2011 : IPLT20.com - Indian Premier League Official Website - Stats\n2012 : IPLT20.com - Indian Premier League Official Website - Stats\n2013 : IPLT20.com - Indian Premier League Official Website - Stats\n2014 : IPLT20.com - Indian Premier League Official Website - Stats\n2015 : IPLT20.com - Indian Premier League Official Website - Stats\n2016 : IPLT20.com - Indian Premier League Official Website - Stats\n2017 : IPLT20.com - Indian Premier League Official Website - Stats\n2018 : IPLT20.com - Indian Premier League Official Website - Stats\n2019 : IPLT20.com - Indian Premier League Official Website - Stats\n" ], [ "results = pd.DataFrame(results)", "_____no_output_____" ], [ "df=pd.read_excel('AuctionData.xlsx') 
\ndf.Team=df.Team.astype(str).str.strip()\ndf = df.set_index(['year', 'Team'])\ndf", "_____no_output_____" ], [ "exchange_rate=61 #1USD=61INR, ~2013\ninr_years=list(range(2014,2021))", "_____no_output_____" ], [ "df.Balance=df.Balance.str.replace(',','').str.replace('₹','').astype(float) #Remove rupee symbol\ndf.Balance[2013]=df.Balance[2013]*exchange_rate\ndf.Overseas=df.Overseas.astype(int)\ndf.TotalPlayers=df.TotalPlayers.astype(int)\nfinances = df", "/Users/abm/.3-VirtualEnvironments/python3_ML/lib/python3.7/site-packages/ipykernel_launcher.py:2: SettingWithCopyWarning: \nA value is trying to be set on a copy of a slice from a DataFrame\n\nSee the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n \n" ], [ "finances", "_____no_output_____" ], [ "#Export\nfinances.to_pickle('IPL_finances.pkl')\nresults.to_pickle('IPL_results.pkl')", "_____no_output_____" ], [ "results", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa48f766832699901737619a99652eade8bf7b2
1,627
ipynb
Jupyter Notebook
.ipynb_checkpoints/Untitled-checkpoint.ipynb
berelieta/ETL-project
b4b9623130abd8e6ed96c08e46fd3bbdd0716936
[ "CC-BY-4.0" ]
null
null
null
.ipynb_checkpoints/Untitled-checkpoint.ipynb
berelieta/ETL-project
b4b9623130abd8e6ed96c08e46fd3bbdd0716936
[ "CC-BY-4.0" ]
null
null
null
.ipynb_checkpoints/Untitled-checkpoint.ipynb
berelieta/ETL-project
b4b9623130abd8e6ed96c08e46fd3bbdd0716936
[ "CC-BY-4.0" ]
null
null
null
18.488636
54
0.525507
[ [ [ "#Dependencies\nimport pymongo", "_____no_output_____" ], [ "csv_file = \"../Resources/customer_data.csv\"\ncustomer_data_df = pd.read_csv(csv_file)", "_____no_output_____" ], [ "# Initialize PyMongo to work with MongoDBs\nconn = 'mongodb://localhost:12312'\nclient = pymongo.MongoClient(conn)", "_____no_output_____" ], [ "use lego_db", "_____no_output_____" ], [ "#Create collections\ndb.createCollection(\"index\")\n\n", "_____no_output_____" ], [ "# Define database and collection\ndb = client.lego_db\ncollection = db.items", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
4aa49105806a009fca095f52f4ec3ea47b08b8da
148,595
ipynb
Jupyter Notebook
LOC.ipynb
dieterch/dReliaCalc3
fd9a29cb62dad78893934e6e6773162bbb310a5c
[ "MIT" ]
null
null
null
LOC.ipynb
dieterch/dReliaCalc3
fd9a29cb62dad78893934e6e6773162bbb310a5c
[ "MIT" ]
null
null
null
LOC.ipynb
dieterch/dReliaCalc3
fd9a29cb62dad78893934e6e6773162bbb310a5c
[ "MIT" ]
null
null
null
398.378016
80,804
0.938242
[ [ [ "## DreliaCalc LOC Report", "_____no_output_____" ] ], [ [ "import arrow\ndateformat='DD.MM.YYYY - HH:mm'\nprint(arrow.now('Europe/Vienna').format(dateformat))", "03.02.2021 - 13:57\n" ], [ "%cd /opt/notebooks/dmyplant2\n!git pull --rebase", "/opt/notebooks/dmyplant2\nAlready up to date.\nCurrent branch master is up to date.\n" ], [ "%cd ../dReliaCalc\nimport dmyplant2\nimport pandas as pd\nimport numpy as np\nfrom pprint import pprint as pp", "/opt/notebooks/dReliaCalc\n" ], [ "dval = pd.read_csv(\"input.csv\",sep=';', encoding='utf-8')\ndval['val start'] = pd.to_datetime(dval['val start'], format='%d.%m.%Y')\nfailures = pd.read_csv(\"failures.csv\",sep=';', encoding='utf-8')\nfailures['date'] = pd.to_datetime(failures['date'], format='%d.%m.%Y')", "_____no_output_____" ], [ "from dmyplant2 import cred\nmp = dmyplant2.MyPlant(7200)\nvl = dmyplant2.Validation(mp,dval, cui_log=False) ", "_____no_output_____" ], [ "import ipywidgets as widgets\nfrom IPython.display import display\nle = vl.engines[0]\n\nw = widgets.Dropdown(\n options=['Pick Engine'] + [e.__str__() for e in vl.engines],\n value='Pick Engine',\n description='Engine:',\n)\ndef on_change(change):\n global le\n if change['type'] == 'change' and change['name'] == 'value':\n le = vl.eng_serialNumber(change['new'][:7])\n print(le)\nw.observe(on_change)\ndisplay(w)\n", "_____no_output_____" ], [ "from pprint import pprint as pp\nid = le.id\nprint(le)", "1320114 M03 BMW LANDSHUT 4.10 \n" ], [ "# fetch Lube Oil Consuption data\nlocdef = {227: 'OilConsumption', \n 237: 'DeltaOpH',\n 228: 'OilVolume', \n 225: 'ActiveEnergy', \n 226: 'AvgPower'}\n\nlimit = 2500\n\n# call myplant\ndfr = le.batch_hist_dataItems(itemIds=locdef, p_limit=2500 ,timeCycle=30)\n\n# Set Type of time column to DateTime\ndf = dfr\ndf['datetime'] = pd.to_datetime(df['time'] * 1000000)\n\n# Filter to Validation Period\ndf = df[df.datetime > pd.to_datetime(le._d['val start'])]\nprint(dfr['time'].count(), df['time'].count())\n\n\n# Filter 
Oil Consumption outliers by < 3 * stdev\ndf = df[np.abs(df.OilConsumption-df.OilConsumption.mean())\n <= (3*df.OilConsumption.std())]\n\n# Calc Rolling Mean values\ndf['LOC'] = df.OilConsumption.rolling(50).mean()\ndf['Pow'] = df.AvgPower.rolling(50).mean()\n", "2325 675\n" ], [ "dfl=df[['datetime','OilConsumption','LOC','AvgPower','Pow']]\ndfl=df[['datetime','LOC','Pow']]\nax = dfl.plot(subplots=False, x='datetime', secondary_y=['AvgPower','Pow'], ylim=(0,0.3), figsize=(16,10), title=le, grid=True)\nax.set_ylim(1000,5000)", "_____no_output_____" ], [ "#parameters\ntdef = {161: 'CountOph', 102: 'PowerAct'}\n#tlimit = 2500\n\n#tfrom = arrow.get(le.valstart_ts)\n#tfrom = arrow.get('2020-02-07')\ntfrom = arrow.now('Europe/Vienna').shift(months=-2) \n#tfrom = arrow.now('Europe/Vienna').shift(days=-2)\n\n#tto = arrow.now('Europe/Vienna')\n#tto = arrow.get(2021,1,23,16,0)\ntto=arrow.now('Europe/Vienna')\n\nttimecycle='1800'\n#tassetType='J-Engine'\n#tincludeMinMax='false'\n#tforceDownSampling='false'", "_____no_output_____" ], [ "df = le.batch_hist_dataItems(itemIds=tdef, p_from=tfrom, p_to=tto,timeCycle=ttimecycle)\n# Set Type of time column to DateTime\ndf['datetime'] = pd.to_datetime(df['time'] * 1000000)\ndf['CountOph'] = df.CountOph - le._d['oph@start']\n\n# Just include the data to plot\ndfp = df[['datetime','CountOph','PowerAct']]\nprint(dfp.tail(3))\ndfp['datetime'].count()", " datetime CountOph PowerAct\n2973 2021-02-03 11:30:00 3355.0 0.0\n2974 2021-02-03 12:00:00 3355.0 0.0\n2975 2021-02-03 12:30:00 3355.0 0.0\n" ], [ "dfp.plot(subplots=False, x='datetime', color=['red','blue'], secondary_y = ['CountOph'],ylim=(0,5000), figsize=(16,10), title=le)\n", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa49120fd716abcc990f1a6ecb546881a84f311
126,834
ipynb
Jupyter Notebook
VGG_04_Drop_128.ipynb
toxtli/VGG-CIFAR10-Keras
64e27a7d18f01db358272bd03ab75807651e8f1c
[ "Apache-2.0" ]
4
2019-01-29T17:08:19.000Z
2019-09-30T02:57:15.000Z
VGG_04_Drop_128.ipynb
toxtli/VGG-CIFAR10-Keras
64e27a7d18f01db358272bd03ab75807651e8f1c
[ "Apache-2.0" ]
null
null
null
VGG_04_Drop_128.ipynb
toxtli/VGG-CIFAR10-Keras
64e27a7d18f01db358272bd03ab75807651e8f1c
[ "Apache-2.0" ]
2
2019-10-18T18:48:38.000Z
2020-10-18T11:15:26.000Z
144.622577
36,376
0.754687
[ [ [ "from google.colab import drive\ndrive.mount('gdrive')\n%cd /content/gdrive/My\\ Drive/colab", "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3Aietf%3Awg%3Aoauth%3A2.0%3Aoob&scope=email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdocs.test%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive.photos.readonly%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpeopleapi.readonly&response_type=code\n\nEnter your authorization code:\n··········\nMounted at gdrive\n/content/gdrive/My Drive/colab\n" ], [ "from __future__ import print_function\nimport json\nimport keras\nimport pickle\nimport os.path\nfrom keras.datasets import cifar10\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation, Flatten\nfrom keras.layers import Conv2D, MaxPooling2D, BatchNormalization\nfrom keras.callbacks import ModelCheckpoint\nfrom keras.callbacks import LambdaCallback\nfrom keras import optimizers\nfrom keras import regularizers\nfrom keras.utils import plot_model\nimport numpy as np\nimport matplotlib.pyplot as plt\n\ndef build_model(x_shape, weight_decay, num_classes):\n # Build the network of vgg for 10 classes with massive dropout and weight decay as described in the paper.\n model = Sequential()\n weight_decay = weight_decay\n\n model.add(Conv2D(64, (3, 3), padding='same',\n input_shape=x_shape, kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(Activation('relu'))\n model.add(BatchNormalization())\n model.add(Dropout(0.3))\n\n model.add(Conv2D(64, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(Activation('relu'))\n model.add(BatchNormalization())\n\n model.add(Flatten())\n model.add(Dense(200, kernel_regularizer=regularizers.l2(weight_decay)))\n 
model.add(Activation('relu'))\n model.add(BatchNormalization())\n model.add(Dropout(0.5))\n \n model.add(Dense(100, kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(Activation('relu'))\n model.add(BatchNormalization())\n model.add(Dropout(0.5))\n \n model.add(Dense(num_classes))\n model.add(Activation('softmax'))\n return model\n\n\ndef normalize(X_train, X_test):\n # this function normalize inputs for zero mean and unit variance\n # it is used when training a model.\n # Input: training set and test set\n # Output: normalized training set and test set according to the trianing set statistics.\n mean = np.mean(X_train, axis=(0, 1, 2, 3))\n std = np.std(X_train, axis=(0, 1, 2, 3))\n X_train = (X_train - mean) / (std + 1e-7)\n X_test = (X_test - mean) / (std + 1e-7)\n return X_train, X_test\n\n\ndef normalize_production(x):\n # this function is used to normalize instances in production according to saved training set statistics\n # Input: X - a training set\n # Output X - a normalized training set according to normalization constants.\n\n # these values produced during first training and are general for the standard cifar10 training set normalization\n mean = 120.707\n std = 64.15\n return (x - mean)/(std+1e-7)\n\n\ndef predict(x, normalize=True, batch_size=50):\n if normalize:\n x = normalize_production(x)\n return model.predict(x, batch_size)\n\ndef updateEpoch(epoch, logs):\n to_save = num_epoch + epoch + 1\n report_data['acc'].append(logs['acc'])\n report_data['loss'].append(logs['loss'])\n report_data['val_acc'].append(logs['val_acc'])\n report_data['val_loss'].append(logs['val_loss'])\n with open(epoch_file, \"w\") as file: \n file.write(str(to_save))\n with open(data_file, \"wb\") as file:\n pickle.dump(report_data, file)\n with open(all_file, \"a+\") as file:\n all_data = [to_save, report_data['acc'], report_data['val_acc'],\n report_data['loss'], report_data['val_loss']]\n file.write(json.dumps(all_data))\n print(epoch, logs)\n\ndef 
train(model):\n # training parameters\n batch_size = 128\n maxepoches = 100\n learning_rate = 0.1\n lr_decay = 1e-6\n lr_drop = 20\n # The data, shuffled and split between train and test sets:\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train, x_test = normalize(x_train, x_test)\n\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n\n def lr_scheduler(epoch):\n return learning_rate * (0.5 ** (epoch // lr_drop))\n\n # data augmentation\n datagen = ImageDataGenerator(\n featurewise_center=False, # set input mean to 0 over the dataset\n samplewise_center=False, # set each sample mean to 0\n featurewise_std_normalization=False, # divide inputs by std of the dataset\n samplewise_std_normalization=False, # divide each input by its std\n zca_whitening=False, # apply ZCA whitening\n rotation_range=15, # randomly rotate images in the range (degrees, 0 to 180)\n width_shift_range=0.1, # randomly shift images horizontally (fraction of total width)\n height_shift_range=0.1, # randomly shift images vertically (fraction of total height)\n horizontal_flip=True, # randomly flip images\n vertical_flip=False) # randomly flip images\n # (std, mean, and principal components if ZCA whitening is applied).\n datagen.fit(x_train)\n\n # optimization details\n sgd = optimizers.SGD(lr=learning_rate, decay=lr_decay, momentum=0.9)\n model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])\n plot_model(model, to_file='model.png')\n \n reduce_lr = keras.callbacks.LearningRateScheduler(lr_scheduler)\n checkpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, mode='max')\n lambdaCall = LambdaCallback(on_epoch_end=updateEpoch)\n callbacks_list = [reduce_lr,checkpoint,lambdaCall]\n\n # training process in a for loop with learning rate drop every 20 epoches.\n history = 
model.fit_generator(\n datagen.flow(x_train, y_train, batch_size=batch_size),\n steps_per_epoch=x_train.shape[0] // batch_size,\n epochs=maxepoches,\n validation_data=(x_test, y_test),\n callbacks=callbacks_list,\n verbose=1)\n\n model.save_weights('cifar10vgg_3.h5')\n \n # summarize history for accuracy\n plt.plot(report_data['acc'])\n plt.plot(report_data['val_acc'])\n plt.title('model accuracy')\n plt.ylabel('accuracy')\n plt.xlabel('epoch')\n plt.legend(['train', 'test'], loc='upper left')\n plt.show()\n\n # summarize history for loss\n plt.plot(report_data['loss'])\n plt.plot(report_data['val_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 'test'], loc='upper left')\n plt.show()\n \n return history\n\nnum_classes = 10\nweight_decay = 0.0005\nx_shape = [32, 32, 3]\ntrain_bool = True\nepoch_file=\"hw1_3_epoch_num.txt\"\ndata_file=\"hw1_3_data.txt\"\nfilepath=\"hw1_3_weights.best.hdf5\"\nall_file=\"hw1_3_all.txt\"\n\nmodel = build_model(x_shape, weight_decay, num_classes)\n\nnum_epoch = 0\nif not os.path.isfile(epoch_file):\n with open(epoch_file, \"w+\") as file: \n file.write(str(num_epoch))\nelse:\n with open(epoch_file, \"r\") as file: \n num_epoch = int(file.read())\n\nif os.path.isfile(filepath):\n model.load_weights(filepath)\n\nif os.path.isfile(data_file):\n with open(data_file, \"rb\") as file:\n report_data = pickle.load(file)\n\n# print the model summary\nmodel.summary()\n\nreport_data = {\n \"acc\":[],\n \"val_acc\":[],\n \"loss\":[],\n \"val_loss\":[]\n}\n\nif train_bool:\n history = train(model)\nelse:\n model.load_weights('cifar10vgg_3.h5')\n\n(x_train, y_train), (x_test, y_test) = cifar10.load_data()\nx_train = x_train.astype('float32')\nx_test = x_test.astype('float32')\n\ny_train = keras.utils.to_categorical(y_train, 10)\ny_test = keras.utils.to_categorical(y_test, 10)\n\npredicted_x = model.predict(x_test)\nresiduals = np.argmax(predicted_x, 1) != np.argmax(y_test, 1)\n\nloss = 
sum(residuals)/len(residuals)\nprint(\"the validation 0/1 loss is: \", loss)", "Using TensorFlow backend.\n" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]
4aa4a81d0abfa4b1a4d144d74612114a90a39ed2
4,260
ipynb
Jupyter Notebook
image_augmentaion/lowres/transform_lowres_images_valid_EE.ipynb
UVA-DSI-2019-Capstones/CHRC
3b89fb6039e435f383754f933537201402391a07
[ "MIT" ]
null
null
null
image_augmentaion/lowres/transform_lowres_images_valid_EE.ipynb
UVA-DSI-2019-Capstones/CHRC
3b89fb6039e435f383754f933537201402391a07
[ "MIT" ]
null
null
null
image_augmentaion/lowres/transform_lowres_images_valid_EE.ipynb
UVA-DSI-2019-Capstones/CHRC
3b89fb6039e435f383754f933537201402391a07
[ "MIT" ]
1
2019-09-07T14:01:14.000Z
2019-09-07T14:01:14.000Z
26.625
133
0.540376
[ [ [ "import staintools\nimport csv\nimport os\nimport glob\nimport re\nfrom pandas import DataFrame, Series\nfrom PIL import Image\nimport timeit\nimport time\nimport cv2\nfrom matplotlib import pyplot as plt\nimport numpy as np", "_____no_output_____" ], [ "train_paths = [\"/scratch/kk4ze/data_lowres_2200x2200/valid/EE/\"]", "_____no_output_____" ], [ "# get images\nimages = {}\nimages_by_folder = {}\nfor train_path in train_paths:\n images_by_folder[str(train_path)] = []\n files = glob.glob(os.path.join(train_path, '*.jpg'))\n for fl in files:\n flbase = os.path.basename(fl)\n flbase_noext = os.path.splitext(flbase)[0]\n images[flbase_noext]=fl\n images_by_folder[str(train_path)].append(flbase_noext)", "_____no_output_____" ], [ "# initialize stain and brightness normalizer\nstain_normalizer = staintools.StainNormalizer(method='vahadane')\nstandardizer = staintools.BrightnessStandardizer()", "_____no_output_____" ], [ "# choose target image\ntarget_image = staintools.read_image(\"/scratch/kk4ze/data_lowres_2200x2200/train/Celiac/C03-05_03_5901_4803_horiz__0.jpg\")\nstandard_target_image = standardizer.transform(target_image)\nstain_normalizer.fit(standard_target_image)", "_____no_output_____" ], [ "# get destination path\npath_change_map = {}\n\nfor key in list(images_by_folder.keys()):\n temp = key.replace('data_lowres_2200x2200', 'data_lowres_2200x2200_augmented')\n path_change_map[key] = temp", "_____no_output_____" ], [ "for key in images_by_folder.keys():\n for value in list(images_by_folder[key]):\n# print(key)\n# print(value)\n# print (str(count) + ' ' + str(value))\n source_img_path = str(key) + str(value) + '.jpg'\n dest_img_path = str(path_change_map[key]) + str(value) + '.jpg'\n# print(source_img_path)\n img = staintools.read_image(source_img_path)\n if (np.mean(img) > 240) or (np.mean(img) < 10):\n continue\n # standardize brightness\n img_standard = standardizer.transform(img)\n # transform the images\n img_normalized = 
stain_normalizer.transform(img_standard)\n # write image to path\n\n# plt.imshow(img)\n# plt.title('my picture')\n# plt.show()\n# plt.imshow(img_normalized)\n# plt.title('my picture')\n# plt.show()\n\n cv2.imwrite(os.path.normpath(dest_img_path), img_normalized)", "_____no_output_____" ], [ "path_change_map", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa4baeffb156663f2d3c00672007373fb3de41b
48,179
ipynb
Jupyter Notebook
training/2021_Fully3D/Week2/03_PDHG.ipynb
lauramurgatroyd/CIL-Demos
d36027983bdd407af4c4c87242a4789b1f043d30
[ "Apache-2.0" ]
6
2019-11-13T05:20:46.000Z
2021-03-18T14:28:59.000Z
training/2021_Fully3D/Week2/03_PDHG.ipynb
lauramurgatroyd/CIL-Demos
d36027983bdd407af4c4c87242a4789b1f043d30
[ "Apache-2.0" ]
33
2019-06-25T20:44:51.000Z
2021-05-07T12:25:32.000Z
training/2021_Fully3D/Week2/03_PDHG.ipynb
lauramurgatroyd/CIL-Demos
d36027983bdd407af4c4c87242a4789b1f043d30
[ "Apache-2.0" ]
3
2021-06-18T10:40:57.000Z
2021-11-09T10:32:22.000Z
36.721799
627
0.570581
[ [ [ "# -*- coding: utf-8 -*-\n# This work is part of the Core Imaging Library (CIL) developed by CCPi \n# (Collaborative Computational Project in Tomographic Imaging), with \n# substantial contributions by UKRI-STFC and University of Manchester.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Copyright 2019 UKRI-STFC, The University of Manchester\n# Authored by: Evangelos Papoutsellis (UKRI-STFC)", "_____no_output_____" ] ], [ [ "<h1><center>Primal Dual Hybrid Gradient Algorithm </center></h1>\n\nIn this demo, we learn how to use the **Primal Dual Hybrid Algorithm (PDHG)** introduced by [Chambolle & Pock](https://hal.archives-ouvertes.fr/hal-00490826/document) for Tomography Reconstruction. We will solve the following minimisation problem under three different regularisation terms, i.e., \n\n* $\\|\\cdot\\|_{1}$ or\n* Tikhonov regularisation or\n* with $L=\\nabla$ and Total variation:\n\n\n<a id='all_reg'></a>\n\n$$\\begin{equation}\nu^{*} =\\underset{u}{\\operatorname{argmin}} \\frac{1}{2} \\| \\mathcal{A} u - g\\|^{2} +\n\\underbrace{\n\\begin{cases}\n\\alpha\\,\\|u\\|_{1}, & \\\\[10pt]\n\\alpha\\,\\|\\nabla u\\|_{2}^{2}, & \\\\[10pt]\n\\alpha\\,\\mathrm{TV}(u) + \\mathbb{I}_{\\{u\\geq 0\\}}(u).\n\\end{cases}}_{Regularisers}\n\\tag{1}\n\\end{equation}$$\n\nwhere,\n\n1. $g$ is the Acqusisition data obtained from the detector.\n\n1. 
$\\mathcal{A}$ is the projection operator ( _Radon transform_ ) that maps from an image-space to an acquisition space, i.e., $\\mathcal{A} : \\mathbb{X} \\rightarrow \\mathbb{Y}, $ where $\\mathbb{X}$ is an __ImageGeometry__ and $\\mathbb{Y}$ is an __AcquisitionGeometry__.\n\n1. $\\alpha$: regularising parameter that measures the trade-off between the fidelity and the regulariser terms.\n\n1. The total variation (isotropic) is defined as $$\\mathrm{TV}(u) = \\|\\nabla u \\|_{2,1} = \\sum \\sqrt{ (\\partial_{y}u)^{2} + (\\partial_{x}u)^{2} }$$\n\n1. $\\mathbb{I}_{\\{u\\geq 0\\}}(u) : = \n\\begin{cases}\n0, & \\mbox{ if } u\\geq 0\\\\\n\\infty , & \\mbox{ otherwise}\n\\,\n\\end{cases}\n$, $\\quad$ a non-negativity constraint for the minimiser $u$.", "_____no_output_____" ], [ "<h2><center><u> Learning objectives </u></center></h2> \n\n- Load the data using the CIL reader: `TXRMDataReader`.\n- Preprocess the data using the CIL processors: `Binner`, `TransmissionAbsorptionConverter`.\n- Run FBP and SIRT reconstructions.\n- Setup PDHG for 3 different regularisers: $L^{1}$, Tikhonov and Total variation.\n\n<!---\n1. Brief intro for non-smooth minimisation problems using PDHG algorithm.\n1. Setup and run PDHG with (__non-smooth__) $L^{1}$ norm regulariser. __(No BlockFramework)__\n1. Use __BlockFunction__ and __Block Framework__ to setup PDHG for Tikhonov and TV reconstructions.\n1. Run Total variation reconstruction with different regularising parameters and compared with FBP and SIRT reconstructions.\n \n\nAt the end of this demo, we will be able to reproduce all the reconstructions presented in the figure below. One can observe that the __Tikhonov regularisation__ with $L = \\nabla$ was able to remove the noise but could not preserve the edges. 
However, this can be achieved with the the total variation reconstruction.\n\n<img src=\"CIL-Demos/Notebooks/images/recon_all_tomo.jpeg\" width=\"1500\"/>\n--->\n\n<!-- <h2><center><u> Prerequisites </u></center></h2>\n\n- AcquisitionData, AcquisitionGeometry, AstraProjectorSimple.\n- BlockOperator, Gradient.\n- FBP, SIRT, CGLS, Tikhonov. -->\n\nWe first import all the necessary libraries for this notebook.\n\n<!---\nIn order to use the PDHG algorithm for the problem above, we need to express our minimisation problem into the following form:\n\n<a id='PDHG_form'></a>\n$$\\min_{u} \\mathcal{F}(K u) + \\mathcal{G}(u)$$\n\nwhere we assume that:\n\n1. $\\mathcal{F}$, $\\mathcal{G}$ are __convex__ functionals\n \n - $\\mathcal{F}: Y \\rightarrow \\mathbb{R}$ \n \n - $\\mathcal{G}: X \\rightarrow \\mathbb{R}$\n \n \n2. $K$ is a continuous linear operator acting from a space X to another space Y :\n\n$$K : X \\rightarrow Y \\quad $$ \n\nwith operator norm defined as $$\\| K \\| = \\max\\{ \\|K x\\|_{Y} : \\|x\\|_{X}\\leq 1 \\}.$$ \n\n**Note**: The Gradient operator has $\\|\\nabla\\| = \\sqrt{8} $ and for the projection operator we use the [Power Method](https://en.wikipedia.org/wiki/Power_iteration) to approximate the greatest eigenvalue of $K$.\n\n--->", "_____no_output_____" ] ], [ [ "# Import libraries\n\nfrom cil.framework import BlockDataContainer\n\nfrom cil.optimisation.functions import L2NormSquared, L1Norm, BlockFunction, MixedL21Norm, IndicatorBox, TotalVariation\nfrom cil.optimisation.operators import GradientOperator, BlockOperator\nfrom cil.optimisation.algorithms import PDHG, SIRT\n\nfrom cil.plugins.astra.operators import ProjectionOperator\nfrom cil.plugins.astra.processors import FBP\n\nfrom cil.plugins.ccpi_regularisation.functions import FGP_TV\n\nfrom cil.utilities.display import show2D, show_geometry\nfrom cil.utilities.jupyter import islicer\n\nfrom cil.io import TXRMDataReader\n\nfrom cil.processors import Binner, TransmissionAbsorptionConverter, 
Slicer\n\nimport matplotlib.pyplot as plt\n\nimport numpy as np\n\nimport os", "_____no_output_____" ] ], [ [ "# Data information\n\nIn this demo, we use the **Walnut** found in [Jørgensen_et_all](https://zenodo.org/record/4822516#.YLXyAJMzZp8). In total, there are 6 individual micro Computed Tomography datasets in the native Zeiss TXRM/TXM format. The six datasets were acquired at the 3D Imaging Center at Technical University of Denmark in 2014 (HDTomo3D in 2016) as part of the ERC-funded project High-Definition Tomography (HDTomo) headed by Prof. Per Christian Hansen. ", "_____no_output_____" ], [ "# Load walnut data", "_____no_output_____" ] ], [ [ "reader = TXRMDataReader()\n\npathname = os.path.abspath(\"/mnt/materials/SIRF/Fully3D/CIL/Walnut/valnut_2014-03-21_643_28/tomo-A\")\ndata_name = \"valnut_tomo-A.txrm\"\nfilename = os.path.join(pathname,data_name )\n\nreader.set_up(file_name=filename, angle_unit='radian')\ndata3D = reader.read()\n\n# reorder data to match default order for Astra/Tigre operator\ndata3D.reorder('astra')\n\n# Get Image and Acquisition geometries\nag3D = data3D.geometry\nig3D = ag3D.get_ImageGeometry()", "_____no_output_____" ] ], [ [ "## Acquisition and Image geometry information", "_____no_output_____" ] ], [ [ "print(ag3D)", "_____no_output_____" ], [ "print(ig3D)", "_____no_output_____" ] ], [ [ "# Show Acquisition geometry and full 3D sinogram.", "_____no_output_____" ] ], [ [ "show_geometry(ag3D)", "_____no_output_____" ], [ "show2D(data3D, slice_list = [('vertical',512), ('angle',800), ('horizontal',512)], cmap=\"inferno\", num_cols=3, size=(15,15))", "_____no_output_____" ] ], [ [ "# Slice through projections", "_____no_output_____" ] ], [ [ "islicer(data3D, direction=1, cmap=\"inferno\")", "_____no_output_____" ] ], [ [ "## For demonstration purposes, we extract the central slice and select only 160 angles from the total 1601 angles.\n\n1. We use the `Slicer` processor with step size of 10.\n1. 
We use the `Binner` processor to crop and bin the acquisition data in order to reduce the field of view.\n1. We use the `TransmissionAbsorptionConverter` to convert from transmission measurements to absorption based on the Beer-Lambert law.\n\n**Note:** To avoid circular artifacts in the reconstruction space, we subtract the mean value of a background Region of interest (ROI), i.e., ROI that does not contain the walnut.", "_____no_output_____" ] ], [ [ "# Extract vertical slice\ndata2D = data3D.subset(vertical='centre')\n\n# Select every 10 angles\nsliced_data = Slicer(roi={'angle':(0,1601,10)})(data2D)\n\n# Reduce background regions\nbinned_data = Binner(roi={'horizontal':(120,-120,2)})(sliced_data)\n\n# Create absorption data \nabsorption_data = TransmissionAbsorptionConverter()(binned_data) \n\n# Remove circular artifacts\nabsorption_data -= np.mean(absorption_data.as_array()[80:100,0:30])", "_____no_output_____" ], [ "# Get Image and Acquisition geometries for one slice\nag2D = absorption_data.geometry\nag2D.set_angles(ag2D.angles, initial_angle=0.2, angle_unit='radian')\nig2D = ag2D.get_ImageGeometry()", "_____no_output_____" ], [ "print(\" Acquisition Geometry 2D: {} with labels {}\".format(ag2D.shape, ag2D.dimension_labels))\nprint(\" Image Geometry 2D: {} with labels {}\".format(ig2D.shape, ig2D.dimension_labels))", "_____no_output_____" ] ], [ [ "## Define Projection Operator \nWe can define our projection operator using our __astra__ __plugin__ that wraps the Astra-Toolbox library.", "_____no_output_____" ] ], [ [ "A = ProjectionOperator(ig2D, ag2D, device = \"gpu\")", "_____no_output_____" ] ], [ [ "## FBP and SIRT reconstuctions\n\nNow, let's perform simple reconstructions using the **Filtered Back Projection (FBP)** and **Simultaneous Iterative Reconstruction Technique [SIRT](../appendix.ipynb/#SIRT) .**\n\nRecall, for FBP we type\n\n\n```python\n\n fbp_recon = FBP(ig, ag, device = 'gpu')(absorption_data)\n \n```\n\n", "_____no_output_____" ], [ "For 
SIRT, we type\n\n \n```python\n \n x_init = ig.allocate()\n sirt = SIRT(initial = x_init, operator = A, data=absorption_data, \n max_iteration = 50, update_objective_interval=10)\n sirt.run(verbose=1)\n sirt_recon = sirt.solution \n \n```\n\n**Note**: In SIRT, a non-negative constraint can be used with\n\n\n```python \n \n constraint=IndicatorBox(lower=0) \n \n```\n ", "_____no_output_____" ], [ "## Exercise 1: Run FBP and SIRT reconstructions\n\nUse the code blocks described above and run FBP (`fbp_recon`) and SIRT (`sirt_recon`) reconstructions.\n\n**Note**: To display the results, use \n\n \n```python \n \n show2D([fbp_recon,sirt_recon], title = ['FBP reconstruction','SIRT reconstruction'], cmap = 'inferno') \n \n```\n\n", "_____no_output_____" ] ], [ [ "# Setup and run the FBP algorithm\nfbp_recon = FBP(..., ..., device = 'gpu')(absorption_data)\n\n# Setup and run the SIRT algorithm, with non-negative constraint\nx_init = ig2D.allocate() \nsirt = SIRT(initial = x_init, \n operator = ..., \n data= ..., \n constraint = ...,\n max_iteration = 300, \n update_objective_interval=100)\nsirt.run(verbose=1)\nsirt_recon = sirt.solution\n\n# Show reconstructions\nshow2D([fbp_recon,sirt_recon], \n title = ['FBP reconstruction','SIRT reconstruction'], \n cmap = 'inferno', fix_range=(0,0.05))", "_____no_output_____" ] ], [ [ "## Exercise 1: Solution", "_____no_output_____" ] ], [ [ "# Setup and run the FBP algorithm\nfbp_recon = FBP(ig2D, ag2D, device = 'gpu')(absorption_data)", "_____no_output_____" ], [ "# Setup and run the SIRT algorithm, with non-negative constraint\nx_init = ig2D.allocate() \nsirt = SIRT(initial = x_init, \n operator = A ,\n data = absorption_data, \n constraint = IndicatorBox(lower=0),\n max_iteration = 300, \n update_objective_interval=100)\nsirt.run(verbose=1)\nsirt_recon = sirt.solution", "_____no_output_____" ], [ "# Show reconstructions\nshow2D([fbp_recon,sirt_recon], \n title = ['FBP reconstruction','SIRT reconstruction'], \n cmap = 'inferno', 
fix_range=(0,0.05))", "_____no_output_____" ] ], [ [ "<h2><center> Why PDHG? </center></h2>\n\nIn the previous notebook, we presented the __Tikhonov regularisation__ for tomography reconstruction, i.e.,\n\n<a id='Tikhonov'></a>\n$$\\begin{equation}\nu^{*} =\\underset{u}{\\operatorname{argmin}} \\frac{1}{2} \\| \\mathcal{A} u - g\\|^{2} + \\alpha\\|L u\\|^{2}_{2}\n\\tag{Tikhonov}\n\\end{equation}$$\n\nwhere we can use either the `GradientOperator` ($L = \\nabla) $ or the `IdentityOperator` ($L = \\mathbb{I}$). Due to the $\\|\\cdot\\|^{2}_{2}$ terms, one can observe that the above objective function is differentiable. As shown in the previous notebook, we can use the standard `GradientDescent` algorithm namely\n \n\n```python\n \n f1 = LeastSquares(A, absorption_data)\n D = GradientOperator(ig2D)\n f2 = OperatorCompositionFunction(L2NormSquared(),D)\n f = f1 + alpha_tikhonov*f2\n\n gd = GD(x_init=ig2D.allocate(), objective_function=f, step_size=None, \n max_iteration=1000, update_objective_interval = 10)\n gd.run(100, verbose=1)\n \n```\n\n\nHowever, this is not always the case. Consider for example an $L^{1}$ norm for the fidelity, i.e., $\\|\\mathcal{A} u - g\\|_{1}$ or an $L^{1}$ norm of the regulariser i.e., $\\|u\\|_{1}$ or a non-negativity constraint $\\mathbb{I}_{\\{u>0\\}}(u)$. An alternative is to use **Proximal Gradient Methods**, discused in the previous notebook, e.g., the `FISTA` algorithm, where we require one of the functions to be differentiable and the other to have a __simple__ proximal method, i.e., \"easy to solve\". 
For more information, we refer to [Parikh_Boyd](https://web.stanford.edu/~boyd/papers/pdf/prox_algs.pdf#page=30).\n\nUsing the __PDHG algorithm__, we can solve minimisation problems where the objective is not differentiable, and the only required assumption is convexity with __simple__ proximal problems.", "_____no_output_____" ], [ "<h2><center> $L^{1}$ regularisation </center></h2>\n\nLet $L=$`IdentityOperator` in [Tikhonov regularisation](#Tikhonov) and replace the\n\n$$\\alpha^{2}\\|L u\\|^{2}_{2}\\quad\\mbox{ with }\\quad \\alpha\\|u\\|_{1}, $$ \n\nwhich results to a non-differentiable objective function. Hence, we have \n\n<a id='Lasso'></a>\n$$\\begin{equation}\nu^{*} =\\underset{u}{\\operatorname{argmin}} \\frac{1}{2} \\| \\mathcal{A} u - g\\|^{2} + \\alpha\\|u\\|_{1} \n\\tag{$L^{2}-L^{1}$}\n\\end{equation}$$", "_____no_output_____" ], [ "<h2><center> How to setup and run PDHG? </center></h2>\n\nIn order to use the PDHG algorithm for the problem above, we need to express our minimisation problem into the following form:\n\n<a id='PDHG_form'></a>\n$$\\begin{equation}\n\\min_{u\\in\\mathbb{X}} \\mathcal{F}(K u) + \\mathcal{G}(u)\n\\label{PDHG_form}\n\\tag{2}\n\\end{equation}$$\n\nwhere we assume that:\n\n1. $\\mathcal{F}$, $\\mathcal{G}$ are __convex__ functionals:\n \n - $\\mathcal{F}: \\mathbb{Y} \\rightarrow \\mathbb{R}$ \n \n - $\\mathcal{G}: \\mathbb{X} \\rightarrow \\mathbb{R}$\n \n \n1. $K$ is a continuous linear operator acting from a space $\\mathbb{X}$ to another space $\\mathbb{Y}$ :\n\n $$K : \\mathbb{X} \\rightarrow \\mathbb{Y} \\quad $$ \n\n with operator norm defined as $$\\| K \\| = \\max\\{ \\|K x\\|_{\\mathbb{Y}} : \\|x\\|_{\\mathbb{X}}\\leq 1 \\}.$$ \n\n\nWe can write the problem [($L^{2}-L^{1})$](#Lasso) into [(2)](#PDHG_form), if we let\n\n1. $K = \\mathcal{A} \\quad \\Longleftrightarrow \\quad $ `K = A` \n\n1. 
$\\mathcal{F}: Y \\rightarrow \\mathbb{R}, \\mbox{ with } \\mathcal{F}(z) := \\frac{1}{2}\\| z - g \\|^{2}, \\quad \\Longleftrightarrow \\quad$ ` F = 0.5 * L2NormSquared(absorption_data)`\n\n1. $\\mathcal{G}: X \\rightarrow \\mathbb{R}, \\mbox{ with } \\mathcal{G}(z) := \\alpha\\|z\\|_{1}, \\quad \\Longleftrightarrow \\quad$ ` G = alpha * L1Norm()`\n\nHence, we can verify that with the above setting we have that [($L^{2}-L^{1})$](#Lasso)$\\Rightarrow$[(2)](#PDHG_form) for $x=u$, $$\\underset{u}{\\operatorname{argmin}} \\frac{1}{2}\\|\\mathcal{A} u - g\\|^{2}_{2} + \\alpha\\|u\\|_{1} = \n\\underset{u}{\\operatorname{argmin}} \\mathcal{F}(\\mathcal{A}u) + \\mathcal{G}(u) = \\underset{x}{\\operatorname{argmin}} \\mathcal{F}(Kx) + \\mathcal{G}(x) $$", "_____no_output_____" ], [ "The algorithm is described in the [Appendix](../appendix.ipynb/#PDHG) and for every iteration, we solve two (proximal-type) subproblems, i.e., __primal & dual problems__ where \n$\\mbox{prox}_{\\tau \\mathcal{G}}(x)$ and $\\mbox{prox}_{\\sigma \\mathcal{F^{*}}}(x)$ are the **proximal operators** of $\\mathcal{G}$ and $\\mathcal{F}^{*}$ (convex conjugate of $\\mathcal{F}$), i.e.,\n\n$$\\begin{equation}\n\\mbox{prox}_{\\lambda \\mathcal{F}}(x) = \\underset{z}{\\operatorname{argmin}} \\frac{1}{2}\\|z - x \\|^{2} + \\lambda \n\\mathcal{F}(z) \\end{equation}\n$$\n\nOne application of the proximal operator is similar to a gradient step but is defined for convex and not necessarily differentiable functions.\n\n\nTo setup and run PDHG in CIL:\n\n\n```python\n \n pdhg = PDHG(f = F, g = G, operator = K, \n max_iterations = 500, update_objective_interval = 100)\n pdhg.run(verbose=1)\n \n```\n\n**Note:** To monitor convergence, we use `pdhg.run(verbose=1)` that prints the objective value of the primal problem, or `pdhg.run(verbose=2)` that prints the objective value of the primal and dual problems, as well as the primal dual gap. 
Nothing is printed with `verbose=0`.", "_____no_output_____" ], [ "<a id='sigma_tau'></a>\n### Define operator $K$, functions $\\mathcal{F}$ and $\\mathcal{G}$", "_____no_output_____" ] ], [ [ "K = A\nF = 0.5 * L2NormSquared(b=absorption_data)\nalpha = 0.01\nG = alpha * L1Norm()", "_____no_output_____" ] ], [ [ "### Setup and run PDHG", "_____no_output_____" ] ], [ [ "# Setup and run PDHG\npdhg_l1 = PDHG(f = F, g = G, operator = K, \n max_iteration = 500,\n update_objective_interval = 100)\npdhg_l1.run(verbose=1)", "_____no_output_____" ], [ "# Show reconstuction and ground truth\nshow2D([pdhg_l1.solution,fbp_recon], fix_range=(0,0.05), title = ['L1 regularisation', 'FBP'], cmap = 'inferno')\n\n# Plot middle line profile\nplt.figure(figsize=(30,8))\nplt.rcParams.update({'font.size': 15})\nplt.rcParams.update({'lines.linewidth': 5})\nplt.plot(fbp_recon.subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'FBP')\nplt.plot(pdhg_l1.solution.subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'L1 regularisation')\nplt.legend()\nplt.title('Middle Line Profiles')\nplt.show()", "_____no_output_____" ] ], [ [ "<h2><center> PDHG for Total Variation Regularisation </center></h2>\n\nNow, we continue with the setup of the PDHG algorithm using the Total variation regulariser appeared in [(1)](#all_reg).\n\nSimilarly, to the [($L^{2}-L^{1}$)](#Lasso) problem, we need to express [($L^{2}-TV$)](#all_reg) in the general form of [PDHG](#PDHG_form). This can be done using two different formulations:\n\n1. Explicit formulation: All the subproblems in the PDHG algorithm have a closed form solution.\n1. 
Implicit formulation: One of the subproblems in the PDHG algorithm is not solved explicitly but an inner solver is used.\n\n---\n<h2><center> ($L^{2}-TV$) with Explicit PDHG </center></h2>\n\nFor the setup of the **($L^{2}-TV$) Explicit PDHG**, we let\n\n$$\\begin{align}\n& f_{1}: \\mathbb{Y} \\rightarrow \\mathbb{R}, \\quad f_{1}(z_{1}) = \\alpha\\,\\|z_{1}\\|_{2,1}, \\mbox{ ( the TV term ) }\\\\\n& f_{2}: \\mathbb{X} \\rightarrow \\mathbb{R}, \\quad f_{2}(z_{2}) = \\frac{1}{2}\\|z_{2} - g\\|_{2}^{2}, \\mbox{ ( the data-fitting term ). }\n\\end{align}$$\n\n```python\n\n f1 = alpha * MixedL21Norm()\n f2 = 0.5 * L2NormSquared(b=absorption_data)\n\n```\n\n\n\nFor $z = (z_{1}, z_{2})\\in \\mathbb{Y}\\times \\mathbb{X}$, we define a separable function, e.g., [BlockFunction,](../appendix.ipynb/#BlockFunction)\n\n$$\\mathcal{F}(z) : = \\mathcal{F}(z_{1},z_{2}) = f_{1}(z_{1}) + f_{2}(z_{2})$$\n\n\n\n```python\n \n F = BlockFunction(f1, f2)\n \n```\n\n\nIn order to obtain an element $z = (z_{1}, z_{2})\\in \\mathbb{Y}\\times \\mathbb{X}$, we need to define a `BlockOperator` $K$, using the two operators involved in [$L^{2}-TV$](#TomoTV), i.e., the `GradientOperator` $\\nabla$ and the `ProjectionOperator` $\\mathcal{A}$.\n\n$$ \\mathcal{K} = \n\\begin{bmatrix}\n\\nabla\\\\\n\\mathcal{A}\n\\end{bmatrix}\n$$\n\n\n\n```python\n \n Grad = GradientOperator(ig)\n K = BlockOperator(Grad, A)\n \n```\n\n\n\nFinally, we enforce a non-negativity constraint by letting $\\mathcal{G} = \\mathbb{I}_{\\{u>0\\}}(u)$ $\\Longleftrightarrow$ `G = IndicatorBox(lower=0)`\n \nAgain, we can verify that with the above setting we can express our problem into [(2)](#PDHG_form), for $x=u$\n\n$$\n\\begin{align}\n\\underset{u}{\\operatorname{argmin}}\\alpha\\|\\nabla u\\|_{2,1} + \\frac{1}{2}\\|\\mathcal{A} u - g\\|^{2}_{2} + \\mathbb{I}_{\\{u>0\\}}(u) = \\underset{u}{\\operatorname{argmin}} f_{1}(\\nabla u) + f_{2}(\\mathcal{A}u) + \\mathbb{I}_{\\{u>0\\}}(u) \\\\ = \\underset{u}{\\operatorname{argmin}} 
F(\n\\begin{bmatrix}\n\\nabla \\\\\n\\mathcal{A}\n\\end{bmatrix}u) + \\mathbb{I}_{\\{u>0\\}}(u) = \n\\underset{u}{\\operatorname{argmin}} \\mathcal{F}(Ku) + \\mathcal{G}(u) = \\underset{x}{\\operatorname{argmin}} \\mathcal{F}(Kx) + \\mathcal{G}(x) \n\\end{align}\n$$\n", "_____no_output_____" ] ], [ [ "# Define BlockFunction F\nalpha_tv = 0.0003\nf1 = alpha_tv * MixedL21Norm()\nf2 = 0.5 * L2NormSquared(b=absorption_data)\nF = BlockFunction(f1, f2)\n\n# Define BlockOperator K\nGrad = GradientOperator(ig2D)\nK = BlockOperator(Grad, A)\n\n# Define Function G\nG = IndicatorBox(lower=0)\n\n\n# Setup and run PDHG\npdhg_tv_explicit = PDHG(f = F, g = G, operator = K,\n max_iteration = 1000,\n update_objective_interval = 200)\npdhg_tv_explicit.run(verbose=1)", "_____no_output_____" ], [ "# Show reconstuction and ground truth\nshow2D([pdhg_tv_explicit.solution,fbp_recon], fix_range=(0,0.055), title = ['TV regularisation','FBP'], cmap = 'inferno')\n\n# Plot middle line profile\nplt.figure(figsize=(30,8))\nplt.rcParams.update({'font.size': 15})\nplt.rcParams.update({'lines.linewidth': 5})\nplt.plot(fbp_recon.subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'FBP')\nplt.plot(pdhg_tv_explicit.solution .subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'TV regularisation')\nplt.legend()\nplt.title('Middle Line Profiles')\nplt.show()", "_____no_output_____" ] ], [ [ "## Speed of PDHG convergence", "_____no_output_____" ], [ "The PDHG algorithm converges when $\\sigma\\tau\\|K\\|^{2}<1$, where the variable $\\sigma$, $\\tau$ are called the _primal and dual stepsizes_. When we setup the PDHG algorithm, the default values of $\\sigma$ and $\\tau$ are used:\n\n- $\\sigma=1.0$\n- $\\tau = \\frac{1.0}{\\sigma\\|K\\|^{2}}$,\n\nand are not passed as arguments in the setup of PDHG. 
However, **the speed of the algorithm depends heavily on the choice of these stepsizes.** For the following, we encourage you to use different values, such as:\n\n- $\\sigma=\\frac{1}{\\|K\\|}$\n- $\\tau =\\frac{1}{\\|K\\|}$\n\nwhere $\\|K\\|$ is the operator norm of $K$. \n\n```python\n\nnormK = K.norm()\nsigma = 1./normK\ntau = 1./normK\n\n\nPDHG(f = F, g = G, operator = K, sigma=sigma, tau=tau,\n max_iteration = 2000,\n update_objective_interval = 500)\n\n```\n\nThe operator norm is computed using the [Power Method](https://en.wikipedia.org/wiki/Power_iteration) to approximate the greatest eigenvalue of $K$.\n\n\n", "_____no_output_____" ], [ "## Exercise 2: Setup and run PDHG algorithm for Tikhonov regularisation\n\nUse exactly the same code as above and replace:\n\n$$f_{1}(z_{1}) = \\alpha\\,\\|z_{1}\\|_{2,1} \\mbox{ with } f_{1}(z_{1}) = \\alpha\\,\\|z_{1}\\|_{2}^{2}.$$\n", "_____no_output_____" ] ], [ [ "# Define BlockFunction F\nalpha_tikhonov = 0.05\nf1 = ... \nF = BlockFunction(f1, f2)\n\n# Setup and run PDHG\npdhg_tikhonov_explicit = PDHG(f = F, g = G, operator = K,\n max_iteration = 500,\n update_objective_interval = 100)\npdhg_tikhonov_explicit.run(verbose=1)", "_____no_output_____" ] ], [ [ "## Exercise 2: Solution", "_____no_output_____" ] ], [ [ "# Define BlockFunction F\nalpha_tikhonov = 0.05\nf1 = alpha_tikhonov * L2NormSquared()\nF = BlockFunction(f1, f2)\n\n# Setup and run PDHG\npdhg_tikhonov_explicit = PDHG(f = F, g = G, operator = K,\n max_iteration = 1000,\n update_objective_interval = 200)\npdhg_tikhonov_explicit.run(verbose=1)", "_____no_output_____" ], [ "# Show reconstuction and ground truth\nshow2D([pdhg_tikhonov_explicit.solution,fbp_recon], fix_range=(0,0.055), title = ['Tikhonov regularisation','FBP'], cmap = 'inferno')\n\n# Plot middle line profile\nplt.figure(figsize=(30,8))\nplt.rcParams.update({'font.size': 15})\nplt.rcParams.update({'lines.linewidth': 5})\nplt.plot(fbp_recon.subset(horizontal_y = 
int(ig2D.voxel_num_y/2)).as_array(), label = 'FBP')\nplt.plot(pdhg_tikhonov_explicit.solution .subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'Tikhonov regularisation')\nplt.legend()\nplt.title('Middle Line Profiles')\nplt.show()", "_____no_output_____" ] ], [ [ "---\n<h2><center> ($L^{2}-TV$) with Implicit PDHG </center></h2>\n\nIn the implicit PDHG, one of the proximal subproblems, i.e., $\\mathrm{prox}_{\\tau\\mathcal{F}^{*}}$ or $\\mathrm{prox}_{\\sigma\\mathcal{G}}$ are not solved exactly and an iterative solver is used. For the setup of the **Implicit PDHG**, we let\n\n$$\\begin{align}\n& \\mathcal{F}: \\mathbb{Y} \\rightarrow \\mathbb{R}, \\quad \\mathcal{F}(z_{1}) = \\frac{1}{2}\\|z_{1} - g\\|_{2}^{2}\\\\\n& \\mathcal{G}: \\mathbb{X} \\rightarrow \\mathbb{R}, \\quad \\mathcal{G}(z_{2}) = \\alpha\\, \\mathrm{TV}(z_{2}) = \\|\\nabla z_{2}\\|_{2,1}\n\\end{align}$$\n\nFor the function $\\mathcal{G}$, we can use the `TotalVariation` `Function` class from `CIL`. Alternatively, we can use the `FGP_TV` `Function` class from our `cil.plugins.ccpi_regularisation` that wraps regularisation routines from the [CCPi-Regularisation Toolkit](https://github.com/vais-ral/CCPi-Regularisation-Toolkit). For these functions, the `proximal` method implements an iterative solver, namely the **Fast Gradient Projection (FGP)** algorithm that solves the **dual** problem of\n\n$$\\begin{equation}\n\\mathrm{prox}_{\\tau G}(u) = \\underset{z}{\\operatorname{argmin}} \\frac{1}{2} \\| u - z\\|^{2} + \\tau\\,\\alpha\\,\\mathrm{TV}(z) + \\mathbb{I}_{\\{z>0\\}}(z),\n\\end{equation}\n$$\n\nfor every PDHG iteration. Hence, we need to specify the number of iterations for the FGP algorithm. In addition, we can enforce a non-negativity constraint using `lower=0.0`. 
For the `FGP_TV` class, we can either use `device=cpu` or `device=gpu` to speed up this inner solver.\n\n\n```python\n\n G = alpha * FGP_TV(max_iteration=100, nonnegativity = True, device = 'gpu')\n \n G = alpha * TotalVariation(max_iteration=100, lower=0.)\n \n```\n", "_____no_output_____" ], [ "## Exercise 3: Setup and run implicit PDHG algorithm with the Total variation regulariser\n\n- Using the TotalVariation class, from CIL. This solves the TV denoising problem (using the FGP algorithm) in CPU.\n\n\n- Using the FGP_TV class from the CCPi regularisation plugin.\n\n **Note:** In the FGP_TV implementation no pixel size information is included when in the forward and backward of the finite difference operator. Hence, we need to divide our regularisation parameter by the pixel size, e.g., $$\\frac{\\alpha}{\\mathrm{ig2D.voxel\\_size\\_y}}$$\n \n\n", "_____no_output_____" ], [ "## $(L^{2}-TV)$ Implicit PDHG: using FGP_TV", "_____no_output_____" ] ], [ [ "F = 0.5 * L2NormSquared(b=absorption_data)\nG = (alpha_tv/ig2D.voxel_size_y) * ... 
\nK = A\n\n# Setup and run PDHG\npdhg_tv_implicit_regtk = PDHG(f = F, g = G, operator = K,\n max_iteration = 1000,\n update_objective_interval = 200)\npdhg_tv_implicit_regtk.run(verbose=1)", "_____no_output_____" ] ], [ [ "## Exercise 3: Solution", "_____no_output_____" ] ], [ [ "F = 0.5 * L2NormSquared(b=absorption_data)\nG = (alpha_tv/ig2D.voxel_size_y) * FGP_TV(max_iteration=100, device='gpu')\nK = A\n\n# Setup and run PDHG\npdhg_tv_implicit_regtk = PDHG(f = F, g = G, operator = K,\n max_iteration = 1000,\n update_objective_interval = 200)\npdhg_tv_implicit_regtk.run(verbose=1)", "_____no_output_____" ], [ "# Show reconstuction and ground truth\nshow2D([pdhg_tv_implicit_regtk.solution,pdhg_tv_explicit.solution, \n (pdhg_tv_explicit.solution-pdhg_tv_implicit_regtk.solution).abs()], \n fix_range=[(0,0.055),(0,0.055),(0,1e-3)],\n title = ['TV (Implicit CCPi-RegTk)','TV (Explicit)', 'Absolute Difference'], \n cmap = 'inferno', num_cols=3)\n\n# Plot middle line profile\nplt.figure(figsize=(30,8))\nplt.rcParams.update({'font.size': 15})\nplt.rcParams.update({'lines.linewidth': 5})\nplt.plot(pdhg_tv_explicit.solution.subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'TV (explicit)')\nplt.plot(pdhg_tv_implicit_regtk.solution.subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'TV (implicit)')\nplt.legend()\nplt.title('Middle Line Profiles')\nplt.show()", "_____no_output_____" ] ], [ [ "In the above comparison between explicit and implicit TV reconstructions, we observe some differences in the reconstructions and in the middle line profiles. This is due to a) the number of iterations and b) $\\sigma, \\tau$ values used in both the explicit and implicit setup of the PDHG algorithm. 
You can try more iterations with different values of $\\sigma$ and $\\tau$ for both cases in order to be sure that converge to the same solution.\n\nFor example, you can use:\n\n* max_iteration = 2000 \n* $\\sigma=\\tau=\\frac{1}{\\|K\\|}$", "_____no_output_____" ], [ "## $(L^{2}-TV)$ Implicit PDHG: using TotalVariation", "_____no_output_____" ] ], [ [ "G = alpha_tv * TotalVariation(max_iteration=100, lower=0.)\n\n# Setup and run PDHG\npdhg_tv_implicit_cil = PDHG(f = F, g = G, operator = K,\n max_iteration = 500,\n update_objective_interval = 100)\npdhg_tv_implicit_cil.run(verbose=1)", "_____no_output_____" ], [ "# Show reconstuction and ground truth\nshow2D([pdhg_tv_implicit_regtk.solution,\n pdhg_tv_implicit_cil.solution,\n (pdhg_tv_implicit_cil.solution-pdhg_tv_implicit_regtk.solution).abs()], \n fix_range=[(0,0.055),(0,0.055),(0,1e-3)], num_cols=3,\n title = ['TV (CIL)','TV (CCPI-RegTk)', 'Absolute Difference'], \n cmap = 'inferno')\n\n# Plot middle line profile\nplt.figure(figsize=(30,8))\nplt.rcParams.update({'font.size': 15})\nplt.rcParams.update({'lines.linewidth': 5})\nplt.plot(pdhg_tv_implicit_regtk.solution.subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'TV (CCPi-RegTk)')\nplt.plot(pdhg_tv_implicit_cil.solution.subset(horizontal_y = int(ig2D.voxel_num_y/2)).as_array(), label = 'TV (CIL)')\nplt.legend()\nplt.title('Middle Line Profiles')\nplt.show()", "_____no_output_____" ] ], [ [ "# FBP reconstruction with all the projection angles.", "_____no_output_____" ] ], [ [ "binned_data3D = Binner(roi={'horizontal':(120,-120,2)})(data3D)\nabsorption_data3D = TransmissionAbsorptionConverter()(binned_data3D.subset(vertical=512))\nabsorption_data3D -= np.mean(absorption_data3D.as_array()[80:100,0:30])\nag3D = absorption_data3D.geometry\nag3D.set_angles(ag3D.angles, initial_angle=0.2, angle_unit='radian')\nig3D = ag3D.get_ImageGeometry()\n\nfbp_recon3D = FBP(ig3D, ag3D)(absorption_data3D)", "_____no_output_____" ] ], [ [ "# Show all 
reconstructions \n\n- FBP (1601 projections)\n- FBP (160 projections)\n- SIRT (160 projections)\n- $L^{1}$ regularisation (160 projections)\n- Tikhonov regularisation (160 projections)\n- Total variation regularisation (160 projections)", "_____no_output_____" ] ], [ [ "show2D([fbp_recon3D, \n fbp_recon, \n sirt_recon, \n pdhg_l1.solution, \n pdhg_tikhonov_explicit.solution,\n pdhg_tv_explicit.solution],\n title=['FBP 1601 projections', 'FBP', 'SIRT','$L^{1}$','Tikhonov','TV'],\n cmap=\"inferno\",num_cols=3, size=(25,20), fix_range=(0,0.05))", "_____no_output_____" ] ], [ [ "## Zoom ROIs", "_____no_output_____" ] ], [ [ "show2D([fbp_recon3D.as_array()[175:225,150:250], \n fbp_recon.as_array()[175:225,150:250], \n sirt_recon.as_array()[175:225,150:250], \n pdhg_l1.solution.as_array()[175:225,150:250], \n pdhg_tikhonov_explicit.solution.as_array()[175:225,150:250],\n pdhg_tv_implicit_regtk.solution.as_array()[175:225,150:250]],\n title=['FBP 1601 projections', 'FBP', 'SIRT','$L^{1}$','Tikhonov','TV'],\n cmap=\"inferno\",num_cols=3, size=(25,20), fix_range=(0,0.05))", "_____no_output_____" ] ], [ [ "<h1><center>Conclusions</center></h1>\n\nIn the PDHG algorithm, the step-sizes $\\sigma, \\tau$ play a significant role in terms of the convergence speed. In the above problems, we used the default values:\n\n* $\\sigma = 1.0$, $\\tau = \\frac{1.0}{\\sigma\\|K\\|^{2}}$\n\nand we encourage you to try different values provided that $\\sigma\\tau\\|K\\|^{2}<1$ is satisfied. 
Certainly, these values are not the optimal ones and there are sevelar accelaration methods in the literature to tune these parameters appropriately, see for instance [Chambolle_Pock2010](https://hal.archives-ouvertes.fr/hal-00490826/document), [Chambolle_Pock2011](https://ieeexplore.ieee.org/document/6126441), [Goldstein et al](https://arxiv.org/pdf/1305.0546.pdf), [Malitsky_Pock](https://arxiv.org/pdf/1608.08883.pdf).\n\nIn the following notebook, we are going to present a stochastic version of PDHG, namely **SPDHG** introduced in [Chambolle et al](https://arxiv.org/pdf/1706.04957.pdf) which is extremely useful to reconstruct large datasets, e.g., 3D walnut data. The idea behind SPDHG is to split our initial dataset into smaller chunks and apply forward and backward operations to these randomly selected subsets of the data. SPDHG has been used for different imaging applications and produces significant computational improvements\nover the PDHG algorithm, see [Ehrhardt et al](https://arxiv.org/abs/1808.07150) and [Papoutsellis et al](https://arxiv.org/pdf/2102.06126.pdf).", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
4aa4bbff0f75d272e17e784cfae80801d0a50e4f
3,062
ipynb
Jupyter Notebook
DSA/arrays/combinationSum.ipynb
lance-lh/Data-Structures-and-Algorithms
c432654edaeb752536e826e88bcce3ed2ab000fb
[ "MIT" ]
1
2019-03-27T13:00:28.000Z
2019-03-27T13:00:28.000Z
DSA/arrays/combinationSum.ipynb
lance-lh/Data-Structures-and-Algorithms
c432654edaeb752536e826e88bcce3ed2ab000fb
[ "MIT" ]
null
null
null
DSA/arrays/combinationSum.ipynb
lance-lh/Data-Structures-and-Algorithms
c432654edaeb752536e826e88bcce3ed2ab000fb
[ "MIT" ]
null
null
null
27.097345
305
0.493468
[ [ [ "Given a set of candidate numbers (candidates) (without duplicates) and a target number (target), find all unique combinations in candidates where the candidate numbers sums to target.\n\nThe same repeated number may be chosen from candidates unlimited number of times.\n\nNote:\n\n- All numbers (including target) will be positive integers.\n- The solution set must not contain duplicate combinations.\n\nExample 1:\n\n Input: candidates = [2,3,6,7], target = 7,\n A solution set is:\n [\n [7],\n [2,2,3]\n ]\nExample 2:\n\n Input: candidates = [2,3,5], target = 8,\n A solution set is:\n [\n [2,2,2,2],\n [2,3,3],\n [3,5]\n ]", "_____no_output_____" ], [ "重要参考和总结:\n\n- [A general approach to backtracking questions in Java (Subsets, Permutations, Combination Sum, Palindrome Partitioning)](https://leetcode.com/problems/combination-sum/discuss/16502/A-general-approach-to-backtracking-questions-in-Java-(Subsets-Permutations-Combination-Sum-Palindrome-Partitioning))", "_____no_output_____" ] ], [ [ "class Solution(object):\n def combinationSum(self, candidates, target):\n \"\"\"\n :type candidates: List[int]\n :type target: int\n :rtype: List[List[int]]\n \"\"\"\n res = []\n candidates.sort()\n self.dfs(candidates, target, 0, [], res)\n return res\n \n \n def dfs(self, nums, target, index, path, res):\n if target < 0:\n return \n if target == 0:\n res.append(path)\n return\n \n for i in range(index, len(nums)):\n self.dfs(nums, target - nums[i], i, path + [nums[i]], res)\n\n# test\ncandidates = [2,3,5]\ntarget = 8\nSolution().combinationSum(candidates, target)", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ] ]
4aa4c1f701ca7c0f626c266ee5c8238c72e2c9e4
119,701
ipynb
Jupyter Notebook
doc/user_guide/ipynb/isolated_skyrmion.ipynb
computationalmodelling/fidimag
07a275c897a44ad1e0d7e8ef563f10345fdc2a6e
[ "BSD-2-Clause" ]
53
2016-02-27T09:40:21.000Z
2022-01-19T21:37:44.000Z
doc/user_guide/ipynb/isolated_skyrmion.ipynb
computationalmodelling/fidimag
07a275c897a44ad1e0d7e8ef563f10345fdc2a6e
[ "BSD-2-Clause" ]
132
2016-02-26T13:18:58.000Z
2021-12-01T21:52:42.000Z
doc/user_guide/ipynb/isolated_skyrmion.ipynb
computationalmodelling/fidimag
07a275c897a44ad1e0d7e8ef563f10345fdc2a6e
[ "BSD-2-Clause" ]
32
2016-02-26T13:21:40.000Z
2022-03-08T08:54:51.000Z
236.096647
28,376
0.918505
[ [ [ "# Isolated skyrmion in confined helimagnetic nanostructure", "_____no_output_____" ], [ "**Authors**: Marijan Beg, Marc-Antonio Bisotti, Weiwei Wang, Ryan Pepper, David Cortes-Ortuno\n\n**Date**: 26 June 2016 (Updated 24 Jan 2019)\n\nThis notebook can be downloaded from the github repository, found [here](https://github.com/computationalmodelling/fidimag/blob/master/doc/ipynb/isolated_skyrmion.ipynb).", "_____no_output_____" ], [ "## Problem specification", "_____no_output_____" ], [ "A thin film disk sample with thickness $t=10 \\,\\text{nm}$ and diameter $d=100 \\,\\text{nm}$ is simulated. The material is FeGe with material parameters [1]:\n\n- exchange energy constant $A = 8.78 \\times 10^{-12} \\,\\text{J/m}$,\n- magnetisation saturation $M_\\text{s} = 3.84 \\times 10^{5} \\,\\text{A/m}$, and\n- Dzyaloshinskii-Moriya energy constant $D = 1.58 \\times 10^{-3} \\,\\text{J/m}^{2}$.\n\nIt is expected that when the system is initialised in the uniform out-of-plane direction $\\mathbf{m}_\\text{init} = (0, 0, 1)$, it relaxes to the isolated Skyrmion (Sk) state (See Supplementary Information in Ref. 1). 
(Note that LLG dynamics is important, which means that artificially disable the precession term in LLG may lead to other states).", "_____no_output_____" ], [ "## Simulation using the LLG equation", "_____no_output_____" ] ], [ [ "from fidimag.micro import Sim\nfrom fidimag.common import CuboidMesh\nfrom fidimag.micro import UniformExchange, Demag, DMI\nfrom fidimag.common import plot\nimport time\n%matplotlib inline", "_____no_output_____" ] ], [ [ "The cuboidal thin film mesh which contains the disk is created:", "_____no_output_____" ] ], [ [ "d = 100 # diameter (nm)\nt = 10 # thickness (nm)\n\n# Mesh discretisation.\ndx = dy = 2.5 # nm\ndz = 2\n\nmesh = CuboidMesh(nx=int(d/dx), ny=int(d/dy), nz=int(t/dz), dx=dx, dy=dy, dz=dz, unit_length=1e-9)", "_____no_output_____" ] ], [ [ "Since the disk geometry is simulated, it is required to set the saturation magnetisation to zero in the regions of the mesh outside the disk. In order to do that, the following function is created:", "_____no_output_____" ] ], [ [ "def Ms_function(Ms):\n def wrapped_function(pos):\n x, y, z = pos[0], pos[1], pos[2]\n \n r = ((x-d/2.)**2 + (y-d/2.)**2)**0.5 # distance from the centre\n \n if r <= d/2:\n # Mesh point is inside the disk.\n return Ms\n else:\n # Mesh point is outside the disk.\n return 0\n return wrapped_function", "_____no_output_____" ] ], [ [ "To reduce the relaxation time, we define a state using a python function.", "_____no_output_____" ] ], [ [ "def init_m(pos):\n x,y,z = pos\n x0, y0 = d/2., d/2.\n r = ((x-x0)**2 + (y-y0)**2)**0.5\n \n if r<10:\n return (0,0, 1)\n elif r<30:\n return (0,0, -1)\n elif r<60:\n return (0, 0, 1)\n else:\n return (0, 0, -1)", "_____no_output_____" ] ], [ [ "Having the magnetisation saturation function, the simulation object can be created:", "_____no_output_____" ] ], [ [ "# FeGe material paremeters.\nMs = 3.84e5 # saturation magnetisation (A/m)\nA = 8.78e-12 # exchange energy constant (J/m)\nD = 1.58e-3 # Dzyaloshinkii-Moriya energy 
constant (J/m**2)\nalpha = 1 # Gilbert damping\ngamma = 2.211e5 # gyromagnetic ration (m/As)\n\n# Create simulation object.\nsim = Sim(mesh)\n# sim = Sim(mesh, driver='steepest_descent')\nsim.Ms = Ms_function(Ms)\nsim.driver.alpha = alpha\nsim.driver.gamma = gamma\n\n# Add energies.\nsim.add(UniformExchange(A=A))\nsim.add(DMI(D=D))\nsim.add(Demag())\n\n# Since the magnetisation dynamics is not important in this stage,\n# the precession term in LLG equation can be set to artificially zero.\n# sim.driver.do_precession = False\n\n# Initialise the system.\nsim.set_m(init_m)", "_____no_output_____" ] ], [ [ "This is the initial configuration used before relaxation:", "_____no_output_____" ] ], [ [ "plot(sim, component='all', z=0.0, cmap='RdBu')", "_____no_output_____" ] ], [ [ "Now the system is relaxed to find a metastable state of the system:", "_____no_output_____" ] ], [ [ "# Relax the system to its equilibrium.\nstart = time.time()\nsim.driver.relax(dt=1e-13, stopping_dmdt=0.1, max_steps=10000,\n save_m_steps=None, save_vtk_steps=None, printing=False)\nend = time.time()", "_____no_output_____" ], [ "#NBVAL_IGNORE_OUTPUT\nprint('Timing: ', end - start)", "Timing: 77.00890803337097\n" ], [ "sim.save_vtk()", "_____no_output_____" ] ], [ [ "The magnetisation components of obtained equilibrium configuration can be plotted in the following way:", "_____no_output_____" ], [ "We plot the magnetisation at the bottom of the sample:", "_____no_output_____" ] ], [ [ "plot(sim, component='all', z=0.0, cmap='RdBu')", "_____no_output_____" ] ], [ [ "and at the top of the sample:", "_____no_output_____" ] ], [ [ "plot(sim, component='all', z=10.0, cmap='RdBu')", "_____no_output_____" ] ], [ [ "and we plot the xy spin angle through the middle of the sample:", "_____no_output_____" ] ], [ [ "plot(sim, component='angle', z=5.0, cmap='hsv')", "_____no_output_____" ] ], [ [ "## Simulation using Steepest Descent", "_____no_output_____" ], [ "An alternative method for the minimisation of 
the energy is using a SteepestDescent method:", "_____no_output_____" ] ], [ [ "# Create simulation object.\nsim = Sim(mesh, driver='steepest_descent')\nsim.Ms = Ms_function(Ms)\nsim.driver.gamma = gamma\n\n# Add energies.\nsim.add(UniformExchange(A=A))\nsim.add(DMI(D=D))\nsim.add(Demag())\n\n# The maximum timestep:\nsim.driver.tmax = 1\n\n# Initialise the system.\nsim.set_m(init_m)", "_____no_output_____" ] ], [ [ "In this case the driver has a `minimise` method", "_____no_output_____" ] ], [ [ "start = time.time()\nsim.driver.minimise(max_steps=10000, stopping_dm=0.5e-4, initial_t_step=1e-2)\nend = time.time()", "#0 max_tau=0.01 max_dm=0.0477 \n#1000 max_tau=0.01 max_dm=0.0022 \n#2000 max_tau=0.01 max_dm=0.000592 \n#3000 max_tau=0.01 max_dm=0.000327 \n#4000 max_tau=0.01 max_dm=0.000179 \n#5000 max_tau=0.01 max_dm=9.82e-05 \n#6000 max_tau=0.01 max_dm=5.43e-05 \n#6141 max_tau=0.01 max_dm=5e-05 \n" ], [ "#NBVAL_IGNORE_OUTPUT\nprint('Timing: ', end - start)", "Timing: 67.3641722202301\n" ] ], [ [ "And the final state is equivalent to the one found with the LLG technique", "_____no_output_____" ] ], [ [ "plot(sim, component='all', z=0.0, cmap='RdBu')", "_____no_output_____" ] ], [ [ "## References\n\n[1] Beg, M. et al. Ground state search, hysteretic behaviour, and reversal mechanism of skyrmionic textures in confined helimagnetic nanostructures. *Sci. Rep.* **5**, 17137 (2015).", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
4aa4c71f56f645e402220aecc0e9e5c1af5a58bd
106,430
ipynb
Jupyter Notebook
stuffs/awgn_channel_snr.ipynb
gasparka/stuffs
e8c53121e495c160d5ed4d4b8e2324a26d37e39e
[ "Apache-2.0" ]
null
null
null
stuffs/awgn_channel_snr.ipynb
gasparka/stuffs
e8c53121e495c160d5ed4d4b8e2324a26d37e39e
[ "Apache-2.0" ]
null
null
null
stuffs/awgn_channel_snr.ipynb
gasparka/stuffs
e8c53121e495c160d5ed4d4b8e2324a26d37e39e
[ "Apache-2.0" ]
null
null
null
615.202312
56,307
0.950334
[ [ [ "import matplotlib\nmatplotlib.use('nbagg')\n# %matplotlib notebook\nimport matplotlib.pyplot as plt\n%load_ext autoreload\n%autoreload 2\n\nimport numpy as np\nimport scipy", "The autoreload extension is already loaded. To reload it, use:\n %reload_ext autoreload\n" ], [ "# this is stolen from some python project\nfrom numpy import complex, sum, abs, pi, arange, array, size, shape, real, sqrt\nfrom numpy import matrix, sqrt, sum, zeros, concatenate, sinc\nfrom numpy.random import randn, seed, random\ndef awgn(input_signal, snr_dB, rate=1.0):\n \"\"\"\n Addditive White Gaussian Noise (AWGN) Channel.\n Parameters\n ----------\n input_signal : 1D ndarray of floats\n Input signal to the channel.\n snr_dB : float\n Output SNR required in dB.\n rate : float\n Rate of the a FEC code used if any, otherwise 1.\n Returns\n -------\n output_signal : 1D ndarray of floats\n Output signal from the channel with the specified SNR.\n \"\"\"\n\n avg_energy = sum(abs(input_signal) * abs(input_signal))/len(input_signal)\n snr_linear = 10**(snr_dB/10.0)\n noise_variance = avg_energy/(2*rate*snr_linear)\n\n noise = (sqrt(noise_variance) * randn(len(input_signal))) + (sqrt(noise_variance) * randn(len(input_signal))*1j)\n\n output_signal = input_signal + noise\n\n return output_signal", "_____no_output_____" ], [ "iq = scipy.fromfile(open('/home/gaspar/git/fm_fhss/phantom2/uksetaga_f2405350000.00_fs2181818.18_rx6_30_0_band2000000.00.iq'), dtype=scipy.complex64)\niq = iq[500000:507000]\niq = awgn(iq, 10)", "_____no_output_____" ], [ "Pxx, freqs, bins, im = plt.specgram(iq, cmap=plt.cm.gist_heat, Fs= 1.0)\nplt.show()", "_____no_output_____" ], [ "iqs = iq\nplt.plot(iqs.real, label='Real')\nplt.plot(iqs.imag, label='Imag')\nplt.legend()\nplt.grid()\nplt.show()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code" ] ]
4aa4d33c0e1a0e0be2f12d8958631e3853c71af6
302,612
ipynb
Jupyter Notebook
LogisticRegression/main.ipynb
GodWriter/Machine-Learning
b38b15b91a89727bc1a29e63b09e39f7c5d4ee4b
[ "Apache-2.0" ]
2
2020-07-15T14:33:31.000Z
2020-07-15T17:21:32.000Z
LogisticRegression/main.ipynb
GodWriter/Machine-Learning
b38b15b91a89727bc1a29e63b09e39f7c5d4ee4b
[ "Apache-2.0" ]
null
null
null
LogisticRegression/main.ipynb
GodWriter/Machine-Learning
b38b15b91a89727bc1a29e63b09e39f7c5d4ee4b
[ "Apache-2.0" ]
null
null
null
275.101818
36,632
0.923704
[ [ [ "import os\n\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\n%matplotlib inline", "_____no_output_____" ] ], [ [ "## 1. 加载并可视化数据", "_____no_output_____" ] ], [ [ "path = 'LogiReg_data.txt'\n\npdData = pd.read_csv(path, header=None, names=['Exam 1', 'Exam 2', 'Admitted'])\npdData.head()", "_____no_output_____" ], [ "pdData.shape", "_____no_output_____" ], [ "positive = pdData[pdData['Admitted'] == 1]\nnegative = pdData[pdData['Admitted'] == 0]\n\nfig, ax = plt.subplots(figsize=(10, 5))\nax.scatter(positive['Exam 1'], positive['Exam 2'], s=30, c='b', marker='o', label='Admitted')\nax.scatter(negative['Exam 1'], negative['Exam 2'], s=30, c='r', marker='x', label='Not Admitted')\n\nax.legend()\nax.set_xlabel('Exam 1 Score')\nax.set_ylabel('Exam 2 Score')", "_____no_output_____" ] ], [ [ "## 2. Sigmoid函数", "_____no_output_____" ], [ "$$\ng(z) = \\frac{1}{1+e^{-z}} \n$$", "_____no_output_____" ] ], [ [ "def sigmoid(z):\n return 1 / (1 + np.exp(-z))", "_____no_output_____" ], [ "nums = np.arange(-10, 10, step=1)\n\nfig, ax = plt.subplots(figsize=(12, 4))\nax.plot(nums, sigmoid(nums), 'r')", "_____no_output_____" ] ], [ [ "## 3. 建立Model", "_____no_output_____" ], [ "$$\n\\begin{array}{ccc}\n\\begin{pmatrix}\\theta_{0} & \\theta_{1} & \\theta_{2}\\end{pmatrix} & \\times & \\begin{pmatrix}1\\\\\nx_{1}\\\\\nx_{2}\n\\end{pmatrix}\\end{array}=\\theta_{0}+\\theta_{1}x_{1}+\\theta_{2}x_{2}\n$$", "_____no_output_____" ] ], [ [ "def model(X, theta):\n return sigmoid(np.dot(X, theta.T))", "_____no_output_____" ], [ "# 在第0列插入1\npdData.insert(0, 'Ones', 1)\n\n# 获取<training data, y>\norig_data = pdData.values\ncols = orig_data.shape[1]\nX = orig_data[:, 0:cols-1]\ny = orig_data[:, cols-1:cols]\n\n# 初始化参数\ntheta = np.zeros([1, 3])", "_____no_output_____" ], [ "X[:5]", "_____no_output_____" ], [ "y[:5]", "_____no_output_____" ], [ "theta", "_____no_output_____" ] ], [ [ "## 4. 
建立Loss Function", "_____no_output_____" ], [ "将对数似然函数去负号\n\n$$\nD(h_\\theta(x), y) = -y\\log(h_\\theta(x)) - (1-y)\\log(1-h_\\theta(x))\n$$\n求平均损失\n$$\nJ(\\theta)=\\frac{1}{n}\\sum_{i=1}^{n} D(h_\\theta(x_i), y_i)\n$$", "_____no_output_____" ] ], [ [ "def cost(X, y, theta):\n left = np.multiply(-y, np.log(model(X, theta)))\n right = np.multiply(1 - y, np.log(1 - model(X, theta)))\n \n return np.sum(left - right) / (len(X))", "_____no_output_____" ], [ "cost(X, y, theta)", "_____no_output_____" ] ], [ [ "## 5. 计算梯度", "_____no_output_____" ], [ "$$\n\\frac{\\partial J}{\\partial \\theta_j}=-\\frac{1}{m}\\sum_{i=1}^n (y_i - h_\\theta (x_i))x_{ij}\n$$", "_____no_output_____" ] ], [ [ "def gradient(X, y, theta):\n grad = np.zeros(theta.shape)\n error = (model(X, theta) - y).ravel()\n \n # 对于每一个参数,取出相关列的数据进行更新\n for j in range(len(theta.ravel())):\n term = np.multiply(error, X[:, j])\n grad[0, j] = np.sum(term) / len(X)\n \n return grad", "_____no_output_____" ] ], [ [ "## 6. 梯度下降", "_____no_output_____" ] ], [ [ "import time\nimport numpy.random", "_____no_output_____" ], [ "STOP_ITER = 0\nSTOP_COST = 1\nSTOP_GRAD = 2\n\ndef stopCriterion(dtype, value, threshold):\n if dtype == STOP_ITER:\n return value > threshold\n elif dtype == STOP_COST:\n return abs(value[-1] - value[-2]) < threshold\n elif dtype == STOP_GRAD:\n return np.linalg.norm(value) < threshold", "_____no_output_____" ], [ "def shuffleData(data):\n # 洗牌操作\n np.random.shuffle(data)\n \n cols = data.shape[1]\n X = data[:, 0:cols-1]\n y = data[:, cols-1:]\n \n return X, y", "_____no_output_____" ], [ "def descent(data, theta, batchSize, stopType, thresh, alpha):\n i = 0\n k = 0\n init_time = time.time()\n \n X, y = shuffleData(data)\n grad = np.zeros(theta.shape)\n costs = [cost(X, y, theta)]\n \n while True:\n grad = gradient(X[k: k+batchSize], y[k: k+batchSize], theta)\n k += batchSize\n \n if k >= n:\n k = 0\n X, y = shuffleData(data)\n \n theta = theta - alpha*grad\n costs.append(cost(X, y, theta))\n i += 
1\n \n if stopType == STOP_ITER:\n value = i\n elif stopType == STOP_COST:\n value = costs\n elif stopType == STOP_GRAD:\n value = grad\n\n if stopCriterion(stopType, value, thresh):\n break\n \n return theta, i-1, costs, grad, time.time()-init_time", "_____no_output_____" ], [ "def runExpe(data, theta, batchSize, stopType, thresh, alpha):\n theta, iter, costs, grad, dur = descent(data, theta, batchSize, stopType, thresh, alpha)\n \n name = \"Original\" if (data[:,1]>2).sum() > 1 else \"Scaled\"\n name += \" data - learning rate: {} - \".format(alpha)\n \n if batchSize == n:\n strDescType = \"Gradient\"\n elif batchSize == 1:\n strDescType = \"Stochastic\"\n else:\n strDescType = \"Mini-batch ({})\".format(batchSize)\n\n name += strDescType + \" descent - Stop: \"\n \n if stopType == STOP_ITER:\n strStop = \"{} iterations\".format(thresh)\n elif stopType == STOP_COST:\n strStop = \"costs change < {}\".format(thresh)\n else:\n strStop = \"gradient norm < {}\".format(thresh)\n\n name += strStop\n print (\"***{}\\nTheta: {} - Iter: {} - Last cost: {:03.2f} - Duration: {:03.2f}s\".format(\n name, theta, iter, costs[-1], dur))\n\n fig, ax = plt.subplots(figsize=(12,4))\n ax.plot(np.arange(len(costs)), costs, 'r')\n ax.set_xlabel('Iterations')\n ax.set_ylabel('Cost')\n ax.set_title(name.upper() + ' - Error vs. Iteration')\n\n return theta", "_____no_output_____" ] ], [ [ "## 7. 
不同的停止策略", "_____no_output_____" ], [ "### 设定迭代次数", "_____no_output_____" ] ], [ [ "#选择的梯度下降方法是基于所有样本的\nn=100\nrunExpe(orig_data, theta, n, STOP_ITER, thresh=5000, alpha=0.000001)", "***Original data - learning rate: 1e-06 - Gradient descent - Stop: 5000 iterations\nTheta: [[-0.00027127 0.00705232 0.00376711]] - Iter: 5000 - Last cost: 0.63 - Duration: 0.74s\n" ] ], [ [ "### 根据损失值停止", "_____no_output_____" ] ], [ [ "runExpe(orig_data, theta, n, STOP_COST, thresh=0.000001, alpha=0.001)", "***Original data - learning rate: 0.001 - Gradient descent - Stop: costs change < 1e-06\nTheta: [[-5.13364014 0.04771429 0.04072397]] - Iter: 109901 - Last cost: 0.38 - Duration: 16.51s\n" ] ], [ [ "### 根据梯度变化停止", "_____no_output_____" ] ], [ [ "runExpe(orig_data, theta, n, STOP_GRAD, thresh=0.05, alpha=0.001)", "***Original data - learning rate: 0.001 - Gradient descent - Stop: gradient norm < 0.05\nTheta: [[-2.37033409 0.02721692 0.01899456]] - Iter: 40045 - Last cost: 0.49 - Duration: 6.24s\n" ] ], [ [ "## 8. 
不同的梯度下降方法", "_____no_output_____" ], [ "### Stochastic descent", "_____no_output_____" ] ], [ [ "runExpe(orig_data, theta, 1, STOP_ITER, thresh=5000, alpha=0.001)", "***Original data - learning rate: 0.001 - Stochastic descent - Stop: 5000 iterations\nTheta: [[-0.38641468 0.05196148 0.03771059]] - Iter: 5000 - Last cost: 1.76 - Duration: 0.23s\n" ], [ "# 降低学习率\nrunExpe(orig_data, theta, 1, STOP_ITER, thresh=15000, alpha=0.000002)", "***Original data - learning rate: 2e-06 - Stochastic descent - Stop: 15000 iterations\nTheta: [[-0.00201996 0.01010247 0.00102232]] - Iter: 15000 - Last cost: 0.63 - Duration: 0.69s\n" ] ], [ [ "结论: 速度快,但稳定性差,需要很小的学习率", "_____no_output_____" ], [ "### Mini-batch descent", "_____no_output_____" ] ], [ [ "runExpe(orig_data, theta, 16, STOP_ITER, thresh=15000, alpha=0.001)", "***Original data - learning rate: 0.001 - Mini-batch (16) descent - Stop: 15000 iterations\nTheta: [[-1.03744957 0.03274143 0.01980994]] - Iter: 15000 - Last cost: 0.81 - Duration: 0.93s\n" ], [ "from sklearn import preprocessing as pp\n\n# 对数据进行标准化 将数据按其属性(按列进行)减去其均值,然后除以其方差。最后得到的结果是,对每个属性/每列来说所有数据都聚集在0附近,方差值为1\nscaled_data = orig_data.copy()\nscaled_data[:, 1:3] = pp.scale(orig_data[:, 1:3])\n\nrunExpe(scaled_data, theta, n, STOP_ITER, thresh=5000, alpha=0.001)", "***Scaled data - learning rate: 0.001 - Gradient descent - Stop: 5000 iterations\nTheta: [[0.3080807 0.86494967 0.77367651]] - Iter: 5000 - Last cost: 0.38 - Duration: 0.79s\n" ] ], [ [ "结论: 原始数据为0.61,而预处理后0.38。数据做预处理非常重要", "_____no_output_____" ] ], [ [ "runExpe(scaled_data, theta, n, STOP_GRAD, thresh=0.02, alpha=0.001)", "***Scaled data - learning rate: 0.001 - Gradient descent - Stop: gradient norm < 0.02\nTheta: [[1.0707921 2.63030842 2.41079787]] - Iter: 59422 - Last cost: 0.22 - Duration: 9.80s\n" ], [ "theta = runExpe(scaled_data, theta, 1, STOP_GRAD, thresh=0.002/5, alpha=0.001)", "***Scaled data - learning rate: 0.001 - Stochastic descent - Stop: gradient norm < 0.0004\nTheta: [[1.14900975 
2.79235457 2.56648979]] - Iter: 72582 - Last cost: 0.22 - Duration: 4.33s\n" ], [ "runExpe(scaled_data, theta, 16, STOP_GRAD, thresh=0.002*2, alpha=0.001)", "***Scaled data - learning rate: 0.001 - Mini-batch (16) descent - Stop: gradient norm < 0.004\nTheta: [[1.1496487 2.79261283 2.5672973 ]] - Iter: 55 - Last cost: 0.22 - Duration: 0.01s\n" ] ], [ [ "## 9. 测试精度", "_____no_output_____" ] ], [ [ "def predict(X, theta):\n return [1 if x >= 0.5 else 0 for x in model(X, theta)]", "_____no_output_____" ], [ "scaled_X = scaled_data[:, :3]\ny = scaled_data[:, 3]\n\npredictions = predict(scaled_X, theta)\ncorrect = [1 if ((a == 1 and b == 1) or (a == 0 and b == 0)) else 0 for (a, b) in zip(predictions, y)]\naccuracy = (sum(map(int, correct)) % len(correct))\n\nprint ('accuracy = {0}%'.format(accuracy))", "accuracy = 89%\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa4d5080007679fe31a69c6079601ee66333a0d
26,947
ipynb
Jupyter Notebook
Examples/22-StepCalibration.ipynb
dapperfu/python_GCode
b5beee57a65d63a821b7f3d539f422ad12518cae
[ "BSD-3-Clause" ]
null
null
null
Examples/22-StepCalibration.ipynb
dapperfu/python_GCode
b5beee57a65d63a821b7f3d539f422ad12518cae
[ "BSD-3-Clause" ]
1
2019-10-28T02:35:20.000Z
2019-10-28T02:35:20.000Z
Examples/22-StepCalibration.ipynb
dapperfu/python_GCode
b5beee57a65d63a821b7f3d539f422ad12518cae
[ "BSD-3-Clause" ]
null
null
null
30.175812
67
0.419156
[ [ [ "# Figure out correct number of steps\n", "_____no_output_____" ] ], [ [ "import gcode\nimport numpy as np", "_____no_output_____" ], [ "# Draw a relative horizontal line.\nrel = gcode.GCode()\nrel.G91()\n\nbaseline = gcode.hline(X0=0, Xf=100, Y=0, n_points=2)\nline = rel+gcode.Line(baseline, power=200, feed=200)\nline", "_____no_output_____" ], [ "import grbl\n\ncnc = grbl.Grbl(\"/dev/ttyUSB0\")", "_____no_output_____" ], [ "def init_machine(**kwargs):\n prog = gcode.GCode(**kwargs)\n prog.G92(X=0, Y=0)\n prog.G90()\n prog.G21()\n return prog", "_____no_output_____" ], [ "# Draw a relative vertical line.\nrel = gcode.GCode()\nrel.G91()\nline_pts = gcode.vline(X=0, Y0=0, Yf=100, n_points=2)\nline = rel+gcode.Line(line_pts, power=200, feed=200)\nline\n\nrow_spacing=2\nstep_cal=init_machine(machine=cnc)\nfor row, steps_mm in enumerate(np.arange(80, 81, 0.025)): \n step_cal.G90()\n step_cal.G0(X=0, Y=np.round(row*row_spacing, 4))\n step_cal.buffer.append(f\"$100={steps_mm}\")\n step_cal.buffer.append(f\"$101={steps_mm}\")\n step_cal+=line\nprint(step_cal)\nstep_cal", 
"G92X0.0Y0.0\nG90\nG21\nG90\nG0X0.0Y0.0\n$100=80.0\n$101=80.0\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y2.0\n$100=80.025\n$101=80.025\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y4.0\n$100=80.05000000000001\n$101=80.05000000000001\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y6.0\n$100=80.07500000000002\n$101=80.07500000000002\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y8.0\n$100=80.10000000000002\n$101=80.10000000000002\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y10.0\n$100=80.12500000000003\n$101=80.12500000000003\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y12.0\n$100=80.15000000000003\n$101=80.15000000000003\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y14.0\n$100=80.17500000000004\n$101=80.17500000000004\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y16.0\n$100=80.20000000000005\n$101=80.20000000000005\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y18.0\n$100=80.22500000000005\n$101=80.22500000000005\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y20.0\n$100=80.25000000000006\n$101=80.25000000000006\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y22.0\n$100=80.27500000000006\n$101=80.27500000000006\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y24.0\n$100=80.30000000000007\n$101=80.30000000000007\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y26.0\n$100=80.32500000000007\n$101=80.32500000000007\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y28.0\n$100=80.35000000000008\n$101=80.35000000000008\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y30.0\n$100=80.37500000000009\n$101=80.37500000000009\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y32.0\n$100=80.40000000000009\n$101=80.40000000000009\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y34.0\n$100=80.4250000000001\
n$101=80.4250000000001\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y36.0\n$100=80.4500000000001\n$101=80.4500000000001\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y38.0\n$100=80.47500000000011\n$101=80.47500000000011\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y40.0\n$100=80.50000000000011\n$101=80.50000000000011\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y42.0\n$100=80.52500000000012\n$101=80.52500000000012\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y44.0\n$100=80.55000000000013\n$101=80.55000000000013\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y46.0\n$100=80.57500000000013\n$101=80.57500000000013\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y48.0\n$100=80.60000000000014\n$101=80.60000000000014\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y50.0\n$100=80.62500000000014\n$101=80.62500000000014\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y52.0\n$100=80.65000000000015\n$101=80.65000000000015\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y54.0\n$100=80.67500000000015\n$101=80.67500000000015\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y56.0\n$100=80.70000000000016\n$101=80.70000000000016\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y58.0\n$100=80.72500000000016\n$101=80.72500000000016\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y60.0\n$100=80.75000000000017\n$101=80.75000000000017\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y62.0\n$100=80.77500000000018\n$101=80.77500000000018\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y64.0\n$100=80.80000000000018\n$101=80.80000000000018\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y66.0\n$100=80.82500000000019\n$101=80.82500000000019\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y68.0\n$100=80.8500000000002\n$101=80.850000
0000002\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y70.0\n$100=80.8750000000002\n$101=80.8750000000002\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y72.0\n$100=80.9000000000002\n$101=80.9000000000002\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y74.0\n$100=80.92500000000021\n$101=80.92500000000021\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y76.0\n$100=80.95000000000022\n$101=80.95000000000022\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\nG90\nG0X0.0Y78.0\n$100=80.97500000000022\n$101=80.97500000000022\nG91\nG0X0.0Y0.0\nM4S200.0\nG1X0.0Y100.0F200.0\nM5\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
4aa4dced2b45c03a80711936b6f3b3050837435a
2,241
ipynb
Jupyter Notebook
dense_correspondence/dataset/simple_datasets_test.ipynb
Rubikplayer/pytorch-dense-correspondence
c5fc049892f126f3cd605d9d2bb5d031a4ad086d
[ "BSD-3-Clause" ]
1
2021-04-02T03:12:54.000Z
2021-04-02T03:12:54.000Z
dense_correspondence/dataset/simple_datasets_test.ipynb
purplearrow/pytorch-dense-correspondence
584c934576342cf97d2f45777192f98118d2dfa7
[ "BSD-3-Clause" ]
null
null
null
dense_correspondence/dataset/simple_datasets_test.ipynb
purplearrow/pytorch-dense-correspondence
584c934576342cf97d2f45777192f98118d2dfa7
[ "BSD-3-Clause" ]
null
null
null
29.103896
122
0.628291
[ [ [ "%%javascript\nIPython.OutputArea.auto_scroll_threshold = 9999;", "_____no_output_____" ], [ "from spartan_dataset_masked import SpartanDataset\nimport dense_correspondence_manipulation.utils.utils as utils\n\nutils.add_dense_correspondence_to_python_path()\nimport dense_correspondence.correspondence_tools.correspondence_finder as correspondence_finder\nimport dense_correspondence.correspondence_tools.correspondence_plotter as correspondence_plotter\nfrom dense_correspondence.dataset.dense_correspondence_dataset_masked import ImageType\n\nimport os\nimport torch\nimport numpy as np\n%matplotlib inline\n\ndataset_config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'dense_correspondence',\n 'dataset', 'composite',\n 'caterpillar_only_9.yaml')\n\ndataset_config = utils.getDictFromYamlFilename(dataset_config_filename)\n\ndataset = SpartanDataset(debug=True, config=dataset_config)", "_____no_output_____" ], [ "match_type, image_a_rgb, image_b_rgb, \\\nmatches_a, matches_b, masked_non_matches_a, \\\nmasked_non_matches_a, non_masked_non_matches_a, \\\nnon_masked_non_matches_b, blind_non_matches_a, \\\nblind_non_matches_b, metadata = dataset.get_single_object_within_scene_data()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code" ] ]
4aa4e776a27871e17eff6329f0be0c7b652bf621
23,737
ipynb
Jupyter Notebook
docs/min_diff/guide/min_diff_data_preparation.ipynb
sarvex/model-remediation
c0592dccfa4706577fe94786cf3d59d4b80cd825
[ "Apache-2.0" ]
27
2020-10-23T18:00:06.000Z
2022-03-28T02:50:47.000Z
docs/min_diff/guide/min_diff_data_preparation.ipynb
Saiprasad16/model-remediation
bd1d63354ca2d145af801705e7c6a069306e9f43
[ "Apache-2.0" ]
12
2020-11-13T20:51:30.000Z
2022-03-30T22:58:18.000Z
docs/min_diff/guide/min_diff_data_preparation.ipynb
Saiprasad16/model-remediation
bd1d63354ca2d145af801705e7c6a069306e9f43
[ "Apache-2.0" ]
14
2020-11-16T12:13:35.000Z
2022-03-27T09:14:28.000Z
42.3875
651
0.611661
[ [ [ "##### Copyright 2020 The TensorFlow Authors.", "_____no_output_____" ] ], [ [ "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.", "_____no_output_____" ] ], [ [ "# MinDiff Data Preparation\n\n<div class=\"devsite-table-wrapper\"><table class=\"tfo-notebook-buttons\" align=\"left\">\n <td><a target=\"_blank\" href=\"https://tensorflow.org/responsible_ai/model_remediation/min_diff/guide/min_diff_data_preparation.ipynb\">\n <img src=\"https://www.tensorflow.org/images/tf_logo_32px.png\" />View on TensorFlow.org</a>\n</td>\n<td>\n <a target=\"_blank\" href=\"https://colab.research.google.com/github/tensorflow/model-remediation/blob/master/docs/min_diff/guide/min_diff_data_preparation.ipynb\">\n <img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\">Run in Google Colab</a>\n</td>\n<td>\n <a target=\"_blank\" href=\"https://github.com/tensorflow/model-remediation/blob/master/docs/min_diff/guide/min_diff_data_preparation.ipynb\">\n <img width=32px src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\">View source on GitHub</a>\n</td>\n<td>\n <a target=\"_blank\" href=\"https://storage.googleapis.com/tensorflow_docs/model-remediation/docs/min_diff/guide/min_diff_data_preparation.ipynb\"><img src=\"https://www.tensorflow.org/images/download_logo_32px.png\" />Download notebook</a>\n</td>\n</table></div>", "_____no_output_____" ], [ "##Introduction\n\nWhen implementing MinDiff, you will need to make complex decisions as you choose and shape your 
input before passing it on to the model. These decisions will largely determine the behavior of MinDiff within your model.\n\nThis guide will cover the technical aspects of this process, but will not discuss how to evaluate a model for fairness, or how to identify particular slices and metrics for evaluation. Please see the [Fairness Indicators guidance](https://www.tensorflow.org/responsible_ai/fairness_indicators/guide/guidance) for details on this.\n\nTo demonstrate MinDiff, this guide uses the [UCI income dataset](https://archive.ics.uci.edu/ml/datasets/census+income). The model task is to predict whether an individual has an income exceeding $50k, based on various personal attributes. This guide assumes there is a problematic gap in the FNR (false negative rate) between `\"Male\"` and `\"Female\"` slices and the model owner (you) has decided to apply MinDiff to address the issue. For more information on the scenarios in which one might choose to apply MinDiff, see the [requirements page](https://www.tensorflow.org/responsible_ai/model_remediation/min_diff/guide/requirements).\n\nNote: We recognize the limitations of the categories used in the original dataset, and acknowledge that these terms do not encompass the full range of vocabulary used in describing gender. Further, we acknowledge that this task doesn’t represent a real-world use case, and is used only to demonstrate the technical details of the MinDiff library.\n\nMinDiff works by penalizing the difference in distribution scores between examples in two sets of data. 
This guide will demonstrate how to choose and construct these additional MinDiff sets as well as how to package everything together so that it can be passed to a model for training.\n", "_____no_output_____" ], [ "##Setup", "_____no_output_____" ] ], [ [ "!pip install --upgrade tensorflow-model-remediation", "_____no_output_____" ], [ "import tensorflow as tf\nfrom tensorflow_model_remediation import min_diff\nfrom tensorflow_model_remediation.tools.tutorials_utils import uci as tutorials_utils", "_____no_output_____" ] ], [ [ "## Original Data\n\nFor demonstration purposes and to reduce runtimes, this guide uses only a sample fraction of the UCI Income dataset. In a real production setting, the full dataset would be utilized.", "_____no_output_____" ] ], [ [ "# Sampled at 0.3 for reduced runtimes.\ntrain = tutorials_utils.get_uci_data(split='train', sample=0.3)\n\nprint(len(train), 'train examples')", "_____no_output_____" ] ], [ [ "### Converting to `tf.data.Dataset`\n\n`MinDiffModel` requires that the input be a `tf.data.Dataset`. If you were using a different format of input prior to integrating MinDiff, you will have to convert your input data.\n\nUse `tf.data.Dataset.from_tensor_slices` to convert to `tf.data.Dataset`.\n\n```\ndataset = tf.data.Dataset.from_tensor_slices((x, y, weights))\ndataset.shuffle(...) 
# Optional.\ndataset.batch(batch_size)\n```\n\nSee [`Model.fit`](https://www.tensorflow.org/api_docs/python/tf/keras/Model#fit) documentation for details on equivalences between the two methods of input.\n\nIn this guide, the input is downloaded as a Pandas DataFrame and therefore, needs this conversion.", "_____no_output_____" ] ], [ [ "# Function to convert a DataFrame into a tf.data.Dataset.\ndef df_to_dataset(dataframe, shuffle=True):\n dataframe = dataframe.copy()\n labels = dataframe.pop('target')\n ds = tf.data.Dataset.from_tensor_slices((dict(dataframe), labels))\n if shuffle:\n ds = ds.shuffle(buffer_size=5000) # Reasonable but arbitrary buffer_size.\n return ds\n\n# Convert the train DataFrame into a Dataset.\noriginal_train_ds = df_to_dataset(train)", "_____no_output_____" ] ], [ [ "Note: The training dataset has not been batched yet but it will be later.", "_____no_output_____" ], [ "## Creating MinDiff data\n\nDuring training, MinDiff will encourage the model to reduce differences in predictions between two additional datasets (which may include examples from the original dataset). The selection of these two datasets is the key decision which will determine the effect MinDiff has on the model.\n\nThe two datasets should be picked such that the disparity in performance that you are trying to remediate is evident and well-represented. Since the goal is to reduce a gap in FNR between `\"Male\"` and `\"Female\"` slices, this means creating one dataset with only _positively_ labeled `\"Male\"` examples and another with only _positively_ labeled `\"Female\"` examples; these will be the MinDiff datasets.\n\nNote: The choice of using only _positively_ labeled examples is directly tied to the target metric. 
This guide is concerned with _false negatives_ which, by definition, are _positively_ labeled examples that were incorrectly classified.\n", "_____no_output_____" ], [ "First, examine the data present.", "_____no_output_____" ] ], [ [ "female_pos = train[(train['sex'] == ' Female') & (train['target'] == 1)]\nmale_pos = train[(train['sex'] == ' Male') & (train['target'] == 1)]\nprint(len(female_pos), 'positively labeled female examples')\nprint(len(male_pos), 'positively labeled male examples')", "_____no_output_____" ] ], [ [ "It is perfectly acceptable to create MinDiff datasets from subsets of the original dataset.\n\nWhile there aren't 5,000 or more positive `\"Male\"` examples as recommended in the [requirements guidance](https://www.tensorflow.org/responsible_ai/model_remediation/min_diff/guide/requirements#how_much_data_do_i_need), there are over 2,000 and it is reasonable to try with that many before collecting more data.", "_____no_output_____" ] ], [ [ "min_diff_male_ds = df_to_dataset(male_pos)", "_____no_output_____" ] ], [ [ "Positive `\"Female\"` examples, however, are much scarcer at 385. This is probably too small for good performance and so will require pulling in additional examples.\n\nNote: Since this guide began by reducing the dataset via sampling, this problem (and the corresponding solution) may seem contrived. However, it serves as a good example of how to approach concerns about the size of your MinDiff datasets.", "_____no_output_____" ] ], [ [ "full_uci_train = tutorials_utils.get_uci_data(split='train')\naugmented_female_pos = full_uci_train[((full_uci_train['sex'] == ' Female') &\n (full_uci_train['target'] == 1))]\nprint(len(augmented_female_pos), 'positively labeled female examples')", "_____no_output_____" ] ], [ [ "Using the full dataset has more than tripled the number of examples that can be used for MinDiff. 
It’s still low but it is enough to try as a first pass.", "_____no_output_____" ] ], [ [ "min_diff_female_ds = df_to_dataset(augmented_female_pos)", "_____no_output_____" ] ], [ [ "Both the MinDiff datasets are significantly smaller than the recommended 5,000 or more examples. While it is reasonable to attempt to apply MinDiff with the current data, you may need to consider collecting additional data if you observe poor performance or overfitting during training.", "_____no_output_____" ], [ "### Using `tf.data.Dataset.filter`\n\nAlternatively, you can create the two MinDiff datasets directly from the converted original `Dataset`.\n\nNote: When using `.filter` it is recommended to use `.cache()` if the dataset can easily fit in memory for runtime performance. If it is too large to do so, consider storing your filtered datasets in your file system and reading them in.", "_____no_output_____" ] ], [ [ "# Male\ndef male_predicate(x, y):\n return tf.equal(x['sex'], b' Male') and tf.equal(y, 0)\n\nalternate_min_diff_male_ds = original_train_ds.filter(male_predicate).cache()\n\n# Female\ndef female_predicate(x, y):\n return tf.equal(x['sex'], b' Female') and tf.equal(y, 0)\n\nfull_uci_train_ds = df_to_dataset(full_uci_train)\nalternate_min_diff_female_ds = full_uci_train_ds.filter(female_predicate).cache()", "_____no_output_____" ] ], [ [ "The resulting `alternate_min_diff_male_ds` and `alternate_min_diff_female_ds` will be equivalent in output to `min_diff_male_ds` and `min_diff_female_ds` respectively.", "_____no_output_____" ], [ "## Constructing your Training Dataset\n\nAs a final step, the three datasets (the two newly created ones and the original) need to be merged into a single dataset that can be passed to the model.", "_____no_output_____" ], [ "### Batching the datasets\n\nBefore merging, the datasets need to batched.\n\n* The original dataset can use the same batching that was used before integrating MinDiff.\n* The MinDiff datasets do not need to have the 
same batch size as the original dataset. In all likelihood, a smaller one will perform just as well. While they don't even need to have the same batch size as each other, it is recommended to do so for best performance.\n\nWhile not strictly necessary, it is recommended to use `drop_remainder=True` for the two MinDiff datasets as this will ensure that they have consistent batch sizes.\n\n\nWarning: The 3 datasets must be batched **before** they are merged together. Failing to do so will likely result in unintended input shapes that will cause errors downstream.", "_____no_output_____" ] ], [ [ "original_train_ds = original_train_ds.batch(128) # Same as before MinDiff.\n\n# The MinDiff datasets can have a different batch_size from original_train_ds\nmin_diff_female_ds = min_diff_female_ds.batch(32, drop_remainder=True)\n# Ideally we use the same batch size for both MinDiff datasets.\nmin_diff_male_ds = min_diff_male_ds.batch(32, drop_remainder=True)", "_____no_output_____" ] ], [ [ "### Packing the Datasets with `pack_min_diff_data`\n\nOnce the datasets are prepared, pack them into a single dataset which will then be passed along to the model. A single batch from the resulting dataset will contain one batch from each of the three datasets you prepared previously.\n\nYou can do this by using the provided `utils` function in the `tensorflow_model_remediation` package:\n", "_____no_output_____" ] ], [ [ "train_with_min_diff_ds = min_diff.keras.utils.pack_min_diff_data(\n original_dataset=original_train_ds,\n sensitive_group_dataset=min_diff_female_ds,\n nonsensitive_group_dataset=min_diff_male_ds)", "_____no_output_____" ] ], [ [ "And that's it! 
You will be able to use other `util` functions in the package to unpack individual batches if needed.", "_____no_output_____" ] ], [ [ "for inputs, original_labels in train_with_min_diff_ds.take(1):\n # Unpacking min_diff_data\n min_diff_data = min_diff.keras.utils.unpack_min_diff_data(inputs)\n min_diff_examples, min_diff_membership = min_diff_data\n # Unpacking original data\n original_inputs = min_diff.keras.utils.unpack_original_inputs(inputs)", "_____no_output_____" ] ], [ [ "With your newly formed data, you are now ready to apply MinDiff in your model! To learn how this is done, please take a look at the other guides starting with [Integrating MinDiff with MinDiffModel](./integrating_min_diff_with_min_diff_model).", "_____no_output_____" ], [ "### Using a Custom Packing Format (optional)\n\nYou may decide to pack the three datasets together in whatever way you choose. The only requirement is that you will need to ensure the model knows how to interpret the data. The default implementation of `MinDiffModel` assumes that the data was packed using `min_diff.keras.utils.pack_min_diff_data`.\n\nOne easy way to format your input as you want is to transform the data as a final step after you have used `min_diff.keras.utils.pack_min_diff_data`.", "_____no_output_____" ] ], [ [ "# Reformat input to be a dict.\ndef _reformat_input(inputs, original_labels):\n unpacked_min_diff_data = min_diff.keras.utils.unpack_min_diff_data(inputs)\n unpacked_original_inputs = min_diff.keras.utils.unpack_original_inputs(inputs)\n\n return {\n 'min_diff_data': unpacked_min_diff_data,\n 'original_data': (unpacked_original_inputs, original_labels)}\n\ncustomized_train_with_min_diff_ds = train_with_min_diff_ds.map(_reformat_input)", "_____no_output_____" ] ], [ [ "Your model will need to know how to read this customized input as detailed in the [Customizing MinDiffModel guide](./customizing_min_diff_model#customizing_default_behaviors_of_mindiffmodel).", "_____no_output_____" ] ], [ [ "for 
batch in customized_train_with_min_diff_ds.take(1):\n # Customized unpacking of min_diff_data\n min_diff_data = batch['min_diff_data']\n # Customized unpacking of original_data\n original_data = batch['original_data']", "_____no_output_____" ] ], [ [ "## Additional Resources\n\n* For an in depth discussion on fairness evaluation see the [Fairness Indicators guidance](https://www.tensorflow.org/responsible_ai/fairness_indicators/guide/guidance)\n* For general information on Remediation and MinDiff, see the [remediation overview](https://www.tensorflow.org/responsible_ai/model_remediation).\n* For details on requirements surrounding MinDiff see [this guide](https://www.tensorflow.org/responsible_ai/model_remediation/min_diff/guide/requirements).\n* To see an end-to-end tutorial on using MinDiff in Keras, see [this tutorial](https://www.tensorflow.org/responsible_ai/model_remediation/min_diff/tutorials/min_diff_keras).", "_____no_output_____" ], [ "## Utility Functions for other Guides\n\nThis guide outlines the process and decision making that you can follow whenever applying MinDiff. The rest of the guides build off this framework. To make this easier, logic found in this guide has been factored out into helper functions:\n\n* `get_uci_data`: This function is already used in this guide. 
It returns a `DataFrame` containing the UCI income data from the indicated split sampled at whatever rate is indicated (100% if unspecified).\n* `df_to_dataset`: This function converts a `DataFrame` into a `tf.data.Dataset` as detailed in this guide with the added functionality of being able to pass the batch_size as a parameter.\n* `get_uci_with_min_diff_dataset`: This function returns a `tf.data.Dataset` containing both the original data and the MinDiff data packed together using the Model Remediation Library util functions as described in this guide.\n\nWarning: These utility functions are **not** part of the official `tensorflow-model-remediation` package API and are subject to change at any time.\n\nThe rest of the guides will build off of these to show how to use other parts of the library.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
4aa4f7d0ec40388a5026a5f567db6234e3935127
740,009
ipynb
Jupyter Notebook
Lucid - library experiments/inceptionV3_experiments_mixed6d.ipynb
hegman12/Deep-learnig-with-tensorflow
3b50099674304242d8a20a861b2734bd5176986d
[ "Apache-2.0" ]
null
null
null
Lucid - library experiments/inceptionV3_experiments_mixed6d.ipynb
hegman12/Deep-learnig-with-tensorflow
3b50099674304242d8a20a861b2734bd5176986d
[ "Apache-2.0" ]
null
null
null
Lucid - library experiments/inceptionV3_experiments_mixed6d.ipynb
hegman12/Deep-learnig-with-tensorflow
3b50099674304242d8a20a861b2734bd5176986d
[ "Apache-2.0" ]
null
null
null
1,633.573951
61,557
0.959784
[ [ [ "import numpy as np\nimport tensorflow as tf\nimport lucid.modelzoo.vision_models as models\nfrom lucid.misc.io import show\nimport lucid.optvis.objectives as objectives\nimport lucid.optvis.param as param\nimport lucid.optvis.render as render\nimport lucid.optvis.transform as transform", "C:\\anaconda3\\lib\\site-packages\\h5py\\__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n from ._conv import register_converters as _register_converters\n" ], [ "v3=models.inceptionV3()\nv3.load_graphdef()", "_____no_output_____" ], [ "l2=lambda layer: objectives.L2(layer=layer)\nl1=lambda layer: objectives.L1(layer=layer)\nsp=lambda layer,w: objectives.sigmoid_prob(layer=layer,w=w)\ntv=lambda layer: objectives.total_variation(layer=layer)\ndd=lambda layer: objectives.deepdream(layer=layer)\nlg=lambda layer,l: objectives.class_logit(layer=layer,label=l)\nchannel=lambda layer,n: objectives.channel(layer=layer,n_channel=n)\nchannel_sig=lambda layer,n: objectives.channel_sigmoid(layer=layer,n_channel=n)\nchannel_sig_neg=lambda layer,n: objectives.channel_sigmoid(layer=layer,n_channel=n)", "_____no_output_____" ], [ "LAYER='InceptionV3/InceptionV3/Mixed_6d/concat_v2'", "_____no_output_____" ], [ "obj=channel(LAYER,0)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 9.670146\n" ], [ "obj=channel_sig(LAYER,0)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 9.482896\n" ], [ "obj=channel_sig_neg(LAYER,0)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 7.853008\n" ], [ "obj=channel(LAYER,1)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 10.054392\n" ], [ "obj=channel_sig(LAYER,1)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 10.746293\n" ], [ 
"obj=channel_sig_neg(LAYER,1)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 10.226729\n" ], [ "obj=channel(LAYER,2)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 10.085575\n" ], [ "obj=channel_sig(LAYER,2)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 10.713465\n" ], [ "obj=channel_sig_neg(LAYER,2)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 8.558062\n" ], [ "obj=channel(LAYER,3)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 8.361439\n" ], [ "obj=channel_sig(LAYER,3)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 9.814279\n" ], [ "obj=channel_sig_neg(LAYER,3)\n_=render.render_vis(model=v3,objective_f=obj,thresholds=(1024,))", "1024 8.631662\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa5018aad94bfc7315280f78ce00cded0a54a1c
52,256
ipynb
Jupyter Notebook
lijin-THU:notes-python/08-object-oriented-programming/08.10-refactoring-the-forest-fire-simutation.ipynb
Maecenas/python-getting-started
2739444e0f4aa692123dcd0c1b9a44218281f9b6
[ "MIT" ]
null
null
null
lijin-THU:notes-python/08-object-oriented-programming/08.10-refactoring-the-forest-fire-simutation.ipynb
Maecenas/python-getting-started
2739444e0f4aa692123dcd0c1b9a44218281f9b6
[ "MIT" ]
null
null
null
lijin-THU:notes-python/08-object-oriented-programming/08.10-refactoring-the-forest-fire-simutation.ipynb
Maecenas/python-getting-started
2739444e0f4aa692123dcd0c1b9a44218281f9b6
[ "MIT" ]
null
null
null
223.316239
45,612
0.891515
[ [ [ "# 重定义森林火灾模拟", "_____no_output_____" ], [ "在前面的例子中,我们定义了一个 `BurnableForest`,实现了一个循序渐进的生长和燃烧过程。\n\n假设我们现在想要定义一个立即燃烧的过程(每次着火之后燃烧到不能燃烧为止,之后再生长,而不是每次只燃烧周围的一圈树木),由于燃烧过程不同,我们需要从 `BurnableForest` 中派生出两个新的子类 `SlowBurnForest`(原来的燃烧过程) 和 `InsantBurnForest`,为此\n\n- 将 `BurnableForest` 中的 `burn_trees()` 方法改写,不做任何操作,直接 `pass`(因为在 `advance_one_step()` 中调用了它,所以不能直接去掉)\n- 在两个子类中定义新的 `burn_trees()` 方法。", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom scipy.ndimage.measurements import label\n\nclass Forest(object):\n \"\"\" Forest can grow trees which eventually die.\"\"\"\n def __init__(self, size=(150,150), p_sapling=0.0025):\n self.size = size\n self.trees = np.zeros(self.size, dtype=bool)\n self.p_sapling = p_sapling\n \n def __repr__(self):\n my_repr = \"{}(size={})\".format(self.__class__.__name__, self.size)\n return my_repr\n \n def __str__(self):\n return self.__class__.__name__\n \n @property\n def num_cells(self):\n \"\"\"Number of cells available for growing trees\"\"\"\n return np.prod(self.size)\n \n @property\n def tree_fraction(self):\n \"\"\"\n Fraction of trees\n \"\"\"\n num_trees = self.trees.sum()\n return float(num_trees) / self.num_cells\n \n def _rand_bool(self, p):\n \"\"\"\n Random boolean distributed according to p, less than p will be True\n \"\"\"\n return np.random.uniform(size=self.trees.shape) < p\n \n def grow_trees(self):\n \"\"\"\n Growing trees.\n \"\"\"\n growth_sites = self._rand_bool(self.p_sapling)\n self.trees[growth_sites] = True \n \n def advance_one_step(self):\n \"\"\"\n Advance one step\n \"\"\"\n self.grow_trees()\n\nclass BurnableForest(Forest):\n \"\"\"\n Burnable forest support fires\n \"\"\" \n def __init__(self, p_lightning=5.0e-6, **kwargs):\n super(BurnableForest, self).__init__(**kwargs)\n self.p_lightning = p_lightning \n self.fires = np.zeros((self.size), dtype=bool)\n \n def advance_one_step(self):\n \"\"\"\n Advance one step\n \"\"\"\n super(BurnableForest, self).advance_one_step()\n self.start_fires()\n 
self.burn_trees()\n \n @property\n def fire_fraction(self):\n \"\"\"\n Fraction of fires\n \"\"\"\n num_fires = self.fires.sum()\n return float(num_fires) / self.num_cells\n \n def start_fires(self):\n \"\"\"\n Start of fire.\n \"\"\"\n lightning_strikes = (self._rand_bool(self.p_lightning) & \n self.trees)\n self.fires[lightning_strikes] = True\n \n def burn_trees(self): \n pass\n \nclass SlowBurnForest(BurnableForest):\n def burn_trees(self):\n \"\"\"\n Burn trees.\n \"\"\"\n fires = np.zeros((self.size[0] + 2, self.size[1] + 2), dtype=bool)\n fires[1:-1, 1:-1] = self.fires\n north = fires[:-2, 1:-1]\n south = fires[2:, 1:-1]\n east = fires[1:-1, :-2]\n west = fires[1:-1, 2:]\n new_fires = (north | south | east | west) & self.trees\n self.trees[self.fires] = False\n self.fires = new_fires\n\nclass InstantBurnForest(BurnableForest):\n def burn_trees(self):\n # 起火点\n strikes = self.fires\n # 找到连通区域\n groves, num_groves = label(self.trees)\n fires = set(groves[strikes])\n self.fires.fill(False)\n # 将与着火点相连的区域都烧掉\n for fire in fires:\n self.fires[groves == fire] = True\n self.trees[self.fires] = False\n self.fires.fill(False)", "_____no_output_____" ] ], [ [ "测试:", "_____no_output_____" ] ], [ [ "forest = Forest()\nsb_forest = SlowBurnForest()\nib_forest = InstantBurnForest()\n\nforests = [forest, sb_forest, ib_forest]\n\ntree_history = []\n\nfor i in xrange(1500):\n for fst in forests:\n fst.advance_one_step()\n tree_history.append(tuple(fst.tree_fraction for fst in forests))", "_____no_output_____" ] ], [ [ "显示结果:", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\n%matplotlib inline\n\nplt.figure(figsize=(10,6))\n\nplt.plot(tree_history)\nplt.legend([f.__str__() for f in forests])\n\nplt.show()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa51259ae0de451ed32f8414fa78ea3e0ad32f2
107,435
ipynb
Jupyter Notebook
vime/notebooks/.ipynb_checkpoints/VIME Training-checkpoint.ipynb
aruberts/blogs
17ed1a155692e91ae876bb76385aafcc1c64e272
[ "Apache-2.0" ]
2
2022-01-15T20:37:40.000Z
2022-01-26T02:13:44.000Z
vime/notebooks/.ipynb_checkpoints/VIME Training-checkpoint.ipynb
aruberts/blogs
17ed1a155692e91ae876bb76385aafcc1c64e272
[ "Apache-2.0" ]
null
null
null
vime/notebooks/.ipynb_checkpoints/VIME Training-checkpoint.ipynb
aruberts/blogs
17ed1a155692e91ae876bb76385aafcc1c64e272
[ "Apache-2.0" ]
3
2022-01-26T02:09:48.000Z
2022-03-29T03:14:43.000Z
102.02754
46,820
0.841541
[ [ [ "# VIME: Self/Semi Supervised Learning for Tabular Data", "_____no_output_____" ], [ "# Setup", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport seaborn as sns\nimport tensorflow as tf\nimport umap\nfrom sklearn.metrics import (average_precision_score, mean_squared_error,\n roc_auc_score)\nfrom sklearn.model_selection import train_test_split\nfrom tqdm import tqdm\nfrom vime import VIME, VIME_Self\nfrom vime_data import (\n labelled_loss_fn, mask_generator_tf,\n pretext_generator_tf, semi_supervised_generator,\n to_vime_dataset, unlabelled_loss_fn\n)", "_____no_output_____" ], [ "%matplotlib inline\n%load_ext autoreload\n%autoreload 2\n\nplt.rcParams[\"figure.figsize\"] = (20,10)", "_____no_output_____" ] ], [ [ "# Data", "_____no_output_____" ], [ "The example data is taken from [Kaggle](https://www.kaggle.com/c/ieee-fraud-detection) but it's already pre-processed and ready to be used. You can checkout the pre-processing notebook in the same folder to get some understanding about what transformations were done to the features.", "_____no_output_____" ] ], [ [ "train = pd.read_csv(\"fraud_train_preprocessed.csv\")\ntest = pd.read_csv(\"fraud_test_preprocessed.csv\")", "_____no_output_____" ], [ "# Drop nan columns as they are not useful for reconstruction error\nnan_columns = [f for f in train.columns if 'nan' in f]\ntrain = train.drop(nan_columns, axis=1)\ntest = test.drop(nan_columns, axis=1)", "_____no_output_____" ], [ "# Also, using only numerical columns because NNs have issue with one-hot encoding \nnum_cols = train.columns[:-125]", "_____no_output_____" ], [ "# Validation size is 10%\nval_size = int(train.shape[0] * 0.1)\nX_train = train.iloc[:-val_size, :]\nX_val = train.iloc[-val_size:, :]\n\n# Labelled 1% of data, everything else is unlabelled\nX_train_labelled = train.sample(frac=0.01)\ny_train_labelled = X_train_labelled.pop('isFraud')\n\nX_val_labelled = 
X_val.sample(frac=0.01)\ny_val_labelled = X_val_labelled.pop('isFraud')\n\nX_train_unlabelled = X_train.loc[~X_train.index.isin(X_train_labelled.index), :].drop('isFraud', axis=1)\nX_val_unlabelled = X_val.loc[~X_val.index.isin(X_val_labelled.index), :].drop('isFraud', axis=1)", "_____no_output_____" ], [ "X_train_labelled = X_train_labelled[num_cols]\nX_val_labelled = X_val_labelled[num_cols]\n\nX_train_unlabelled = X_train_unlabelled[num_cols]\nX_val_unlabelled = X_val_unlabelled[num_cols]", "_____no_output_____" ], [ "X_val_labelled.shape, X_train_labelled.shape", "_____no_output_____" ], [ "print(\"Labelled Fraudsters\", y_train_labelled.sum())\n\nprint(\n \"Labelled Proportion:\", \n np.round(X_train_labelled.shape[0] / (X_train_unlabelled.shape[0] + X_train_labelled.shape[0]), 5)\n)", "Labelled Fraudsters 184.0\nLabelled Proportion: 0.0111\n" ] ], [ [ "The following model will be trained with these hyperparameters:", "_____no_output_____" ] ], [ [ "vime_params = {\n 'alpha': 4, \n 'beta': 10, \n 'k': 5, \n 'p_m': 0.36\n}", "_____no_output_____" ] ], [ [ "## Self-Supervised Learning", "_____no_output_____" ], [ "### Data Prep", "_____no_output_____" ], [ "The model needs 1 input - corrupted X, and 2 outputs - mask and original X. 
", "_____no_output_____" ] ], [ [ "batch_size = 1024\n\n# Datasets\ntrain_ds, train_m = to_vime_dataset(X_train_unlabelled, vime_params['p_m'], batch_size=batch_size, shuffle=True)\nval_ds, val_m = to_vime_dataset(X_val_unlabelled, vime_params['p_m'], batch_size=batch_size)\nnum_features = X_train_unlabelled.shape[1]\n\nprint('Proportion Corrupted:', np.round(train_m.numpy().mean(), 2))\n\n# Training\nvime_s = VIME_Self(num_features)\nvime_s.compile(\n optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),\n loss={\n 'mask': 'binary_crossentropy', \n 'feature': 'mean_squared_error'},\n loss_weights={'mask':1, 'feature': vime_params['alpha']}\n)\n\ncbs = [tf.keras.callbacks.EarlyStopping(\n monitor=\"val_loss\", patience=10, restore_best_weights=True\n )]\n\nvime_s.fit(\n train_ds,\n validation_data=val_ds,\n epochs=1000,\n callbacks=cbs\n)\n\nvime_s.save('./vime_self')", "_____no_output_____" ], [ "vime_s = tf.keras.models.load_model('./vime_self')", "_____no_output_____" ] ], [ [ "### Evaluation", "_____no_output_____" ], [ "All the evaluation will be done on the validation set", "_____no_output_____" ] ], [ [ "val_self_preds = vime_s.predict(val_ds)", "_____no_output_____" ] ], [ [ "To evaluate the mask reconstruction ability we can simply check the ROC AUC score for mask predictions across all the features. 
", "_____no_output_____" ] ], [ [ "feature_aucs = []\nfor i in tqdm(range(X_val_unlabelled.shape[1])):\n roc = roc_auc_score(val_m.numpy()[:, i], val_self_preds['mask'][:, i])\n feature_aucs.append(roc)", "100%|██████████| 110/110 [00:02<00:00, 49.70it/s]\n" ], [ "self_metrics = pd.DataFrame({\"metric\": 'mask_auc', \n \"metric_values\": feature_aucs})", "_____no_output_____" ] ], [ [ "Now, we can evaluate the feature reconstruction ability using RMSE and correlation coefficients", "_____no_output_____" ] ], [ [ "feature_corrs = []\nfor i in tqdm(range(X_val_unlabelled.shape[1])):\n c = np.corrcoef(X_val_unlabelled.values[:, i], val_self_preds['feature'][:, i])[0, 1]\n feature_corrs.append(c)\n \nself_metrics = pd.concat([\n self_metrics,\n pd.DataFrame({\"metric\": 'feature_correlation', \n \"metric_values\": feature_corrs})\n])", "_____no_output_____" ] ], [ [ "From the plot and table above, we can see that the model has learned to reconstruct most of the features. Half of the features are reconstructed with relatively strong correlation with original data. Only a handful of features are not properly reconstructed. Let's check the RMSE across all the features", "_____no_output_____" ] ], [ [ "rmses = []\nfor i in tqdm(range(X_val_unlabelled.shape[1])):\n mse = mean_squared_error(X_val_unlabelled.values[:, i], val_self_preds['feature'][:, i])\n rmses.append(np.sqrt(mse))\n \nself_metrics = pd.concat([\n self_metrics,\n pd.DataFrame({\"metric\": 'RMSE', \n \"metric_values\": rmses})\n])", "_____no_output_____" ], [ "sns.boxplot(x=self_metrics['metric'], y=self_metrics['metric_values'])\nplt.title(\"Self-Supervised VIME Evaluation\")", "_____no_output_____" ] ], [ [ "RMSE distribution further indicates that mjority of the features are well-reconstructed.", "_____no_output_____" ], [ "Another way to evaluate the self-supervised model is to look at the embeddings. 
Since the whole point of corrupting the dataset is to learn to generate robust embeddings, we can assume that if a sample was corrupted 5 times, all 5 embeddings should be relatively close to each other in the vector space. Let's check this hypothesis by corrupting 10 different samples 5 times and projecting their embeddings to 2-dimensional space using UMAP.", "_____no_output_____" ] ], [ [ "from tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Input, Dense, Dropout\n\n\ndef generate_k_corrupted(x, k, p_m):\n x_u_list = []\n for i in range(k):\n mask = mask_generator_tf(p_m, x)\n _, x_corr = pretext_generator_tf(mask, tf.constant(x, dtype=tf.float32))\n x_u_list.append(x_corr)\n \n \n # Prepare input with shape (n, k, d)\n x_u_corrupted = np.zeros((x.shape[0], k, x.shape[1]))\n for i in range(x.shape[0]):\n for j in range(k):\n x_u_corrupted[i, j, :] = x_u_list[j][i, :]\n \n return x_u_corrupted", "_____no_output_____" ], [ "vime_s = tf.keras.models.load_model('./vime_self')\n\n# Sequential model to produce embeddings\nencoding_model = Sequential(\n [\n Input(num_features),\n vime_s.encoder\n ]\n)\n\ndense_model = Sequential(\n [\n Input(num_features),\n Dense(num_features, activation=\"relu\"),\n \n ]\n)\n\n# Create corrupted sample\nsamples = X_val_unlabelled.sample(10)\nsample_corrupted = generate_k_corrupted(\n x=samples,\n k=5,\n p_m=0.4\n)\n\nval_encoding = encoding_model.predict(sample_corrupted, batch_size=batch_size)\nrandom_encoding = dense_model.predict(sample_corrupted, batch_size=batch_size)", "WARNING:tensorflow:Model was constructed with shape (None, 110) for input KerasTensor(type_spec=TensorSpec(shape=(None, 110), dtype=tf.float32, name='input_41'), name='input_41', description=\"created by layer 'input_41'\"), but it was called on an input with incompatible shape (None, 5, 110).\nWARNING:tensorflow:Model was constructed with shape (None, 110) for input KerasTensor(type_spec=TensorSpec(shape=(None, 110), 
dtype=tf.float32, name='dense_104_input'), name='dense_104_input', description=\"created by layer 'dense_104_input'\"), but it was called on an input with incompatible shape (None, 5, 110).\nWARNING:tensorflow:Model was constructed with shape (None, 110) for input KerasTensor(type_spec=TensorSpec(shape=(None, 110), dtype=tf.float32, name='input_42'), name='input_42', description=\"created by layer 'input_42'\"), but it was called on an input with incompatible shape (None, 5, 110).\n" ], [ "fig, axs = plt.subplots(1, 2)\n\n# Project corrupted samples\nu = umap.UMAP(n_neighbors=5, min_dist=0.8)\ncorrupted_umap = u.fit_transform(val_encoding.reshape(-1, val_encoding.shape[2]))\nsample_ids = np.array([np.repeat(i, 5) for i in range(10)]).ravel()\nsns.scatterplot(corrupted_umap[:, 0], corrupted_umap[:, 1], hue=sample_ids, palette=\"tab10\", ax=axs[0])\naxs[0].set_title('VIME Embeddings of Corrupted Samples')\nplt.legend(title='Sample ID')\n\n# Project corrupted samples\nu = umap.UMAP(n_neighbors=5, min_dist=0.8)\ncorrupted_umap = u.fit_transform(random_encoding.reshape(-1, random_encoding.shape[2]))\nsample_ids = np.array([np.repeat(i, 5) for i in range(10)]).ravel()\nsns.scatterplot(corrupted_umap[:, 0], corrupted_umap[:, 1], hue=sample_ids, palette=\"tab10\", ax=axs[1])\naxs[1].set_title('Not-trained Embeddings of Corrupted Samples')\n\nplt.legend(title='Sample ID')\nplt.show()", "Pass the following variables as keyword args: x, y. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\nNo handles with labels found to put in legend.\nPass the following variables as keyword args: x, y. 
From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n" ] ], [ [ "As you can see, the embeddings indeed put the same samples closer to each other, even though some of their values were corrupted. According to the authors, this means that the model has learned useful information about the feature correlations which can be helpful in the downstream tasks. Now, we can use this encoder in the next semi-supervised part.", "_____no_output_____" ], [ "## Semi-Supervised Learning", "_____no_output_____" ] ], [ [ "semi_batch_size = 512\nnum_features = X_train_unlabelled.shape[1]", "_____no_output_____" ] ], [ [ "Since we have different number of labelled and unlabelled examples we need to use generators. They will shuffle and select appropriate number of rows for each training iteration.", "_____no_output_____" ] ], [ [ "def train_semi_generator():\n return semi_supervised_generator(\n X_train_labelled.values,\n X_train_unlabelled.values, \n y_train_labelled.values, \n bs=semi_batch_size\n )\n\ndef val_semi_generator():\n return semi_supervised_generator(\n X_val_labelled.values,\n X_val_unlabelled.values, \n y_val_labelled.values, \n bs=semi_batch_size\n )\n\nsemi_train_dataset = tf.data.Dataset.from_generator(\n train_semi_generator,\n output_signature=(\n tf.TensorSpec(shape=(semi_batch_size, num_features), dtype=tf.float32),\n tf.TensorSpec(shape=(semi_batch_size), dtype=tf.float32),\n tf.TensorSpec(shape=(semi_batch_size, num_features), dtype=tf.float32)\n )\n)\n\nsemi_val_dataset = tf.data.Dataset.from_generator(\n val_semi_generator,\n output_signature=(\n tf.TensorSpec(shape=(semi_batch_size, num_features), dtype=tf.float32),\n tf.TensorSpec(shape=(semi_batch_size), dtype=tf.float32),\n tf.TensorSpec(shape=(semi_batch_size, num_features), dtype=tf.float32)\n )\n)", "_____no_output_____" ] ], [ [ "## Self Supervised VIME", "_____no_output_____" ] ], 
[ [ "def train_vime_semi(encoder, train_dataset, val_dataset, train_params, vime_params):\n # Model\n vime = VIME(encoder)\n \n # Training parameters\n iterations = train_params['iterations']\n optimizer = tf.keras.optimizers.Adam(train_params['learning_rate'])\n early_stop = train_params['early_stop']\n \n # Set metrics to track\n best_loss = 1e10\n no_improve = 0\n \n # Begining training loop\n for it in range(iterations):\n # Grab a batch for iteration\n it_train = iter(train_dataset)\n X_l, y_l, X_u = next(it_train)\n \n # Generate unlabelled batch with k corrupted examples per sample\n X_u_corrupted = generate_k_corrupted(X_u, vime_params['k'], vime_params['p_m'])\n \n with tf.GradientTape() as tape:\n # Predict labelled & unlabelled\n labelled_preds = vime(X_l)\n unlabelled_preds = vime(X_u_corrupted)\n\n # Calculate losses\n labelled_loss = labelled_loss_fn(y_l, labelled_preds)\n unlabelled_loss = unlabelled_loss_fn(unlabelled_preds)\n\n # Total loss\n semi_supervised_loss = unlabelled_loss + vime_params['beta'] * labelled_loss\n \n if it % 10 == 0:\n val_iter_losses = []\n print(f\"\\nMetrics for Iteration {it}\")\n for i in range(5):\n # Grab a batch\n it_val = iter(val_dataset)\n X_l_val, y_l_val, X_u_val = next(it_val)\n\n # Generate unlabelled batch with k corrupted examples per sample\n X_u_corrupted = generate_k_corrupted(X_u_val, vime_params['k'], vime_params['p_m'])\n\n # Predict labelled & unlabelled\n labelled_preds_val = vime(X_l_val)\n unlabelled_preds_val = vime(X_u_corrupted)\n\n # Calculate losses\n labelled_loss_val = labelled_loss_fn(y_l_val, labelled_preds_val)\n unlabelled_loss_val = unlabelled_loss_fn(unlabelled_preds_val)\n semi_supervised_loss_val = unlabelled_loss_val + vime_params['beta'] * labelled_loss_val\n val_iter_losses.append(semi_supervised_loss_val)\n \n # Average loss over 5 validation iterations\n semi_supervised_loss_val = np.mean(val_iter_losses)\n\n print(f\"Train Loss {np.round(semi_supervised_loss, 5)}, Val Loss 
{np.round(semi_supervised_loss_val, 5)}\")\n # Update metrics if val_loss is better\n if semi_supervised_loss_val < best_loss:\n best_loss = semi_supervised_loss_val\n no_improve = 0\n vime.save('./vime')\n else:\n no_improve += 1\n print(f\"Validation loss not improved {no_improve} times\")\n\n # Early stopping\n if no_improve == early_stop:\n break\n \n # Update weights\n grads = tape.gradient(semi_supervised_loss, vime.trainable_weights)\n optimizer.apply_gradients(zip(grads, vime.trainable_weights))\n\n vime = tf.keras.models.load_model('./vime')\n\n return vime", "_____no_output_____" ], [ "train_params = {\n 'num_features': num_features,\n 'iterations': 1000,\n 'early_stop': 20,\n 'learning_rate': 0.001\n}\n\nvime_self = tf.keras.models.load_model('./vime_self')\n\nvime_semi = train_vime_semi(\n encoder = vime_self.encoder,\n train_dataset = semi_train_dataset, \n val_dataset = semi_val_dataset,\n train_params = train_params,\n vime_params = vime_params\n)\n", "_____no_output_____" ], [ "test_ds = tf.data.Dataset.from_tensor_slices(test[num_cols]).batch(batch_size)\n\nvime_tuned_preds = vime_semi.predict(test_ds)\npr = average_precision_score(test['isFraud'], vime_tuned_preds)\nprint(pr)", "WARNING:tensorflow:No training configuration found in save file, so the model was *not* compiled. 
Compile it manually.\n0.38815360485173106\n" ] ], [ [ "## Evaluation", "_____no_output_____" ], [ "Re-training the model 10 times to get distribution of PR AUC scores.", "_____no_output_____" ] ], [ [ "vime_prs = []\ntest_ds = tf.data.Dataset.from_tensor_slices(test[num_cols]).batch(batch_size)\n\nfor i in range(10):\n train_params = {\n 'num_features': num_features,\n 'iterations': 1000,\n 'early_stop': 10,\n 'learning_rate': 0.001\n }\n vime_self = tf.keras.models.load_model('./vime_self')\n vime_self.encoder.trainable = False\n vime_semi = train_vime_semi(\n encoder = vime_self.encoder,\n train_dataset = semi_train_dataset, \n val_dataset = semi_val_dataset,\n train_params = train_params,\n vime_params = vime_params\n )\n # fine-tune\n vime_semi = tf.keras.models.load_model('./vime')\n vime_semi.encoder.trainable\n\n\n vime_tuned_preds = vime_semi.predict(test_ds)\n pr = average_precision_score(test['isFraud'], vime_tuned_preds)\n vime_prs.append(pr)\n print('VIME Train', i, \"PR AUC:\", pr)", "_____no_output_____" ] ], [ [ "### Compare with MLP and RF", "_____no_output_____" ] ], [ [ "mlp_prs = []\n\nfor i in range(10):\n base_mlp = Sequential([\n Input(shape=num_features),\n Dense(num_features),\n Dense(128),\n Dropout(0.2),\n Dense(128),\n Dropout(0.2),\n Dense(1, activation='sigmoid')\n ])\n\n base_mlp.compile(\n optimizer=tf.keras.optimizers.Adam(learning_rate=0.0001),\n loss='binary_crossentropy'\n )\n\n # Early stopping based on validation loss \n cbs = [tf.keras.callbacks.EarlyStopping(\n monitor=\"val_loss\", patience=20, restore_best_weights=True\n )]\n\n base_mlp.fit(\n x=X_train_labelled.values,\n y=y_train_labelled,\n validation_data=(X_val_labelled.values, y_val_labelled),\n epochs=1000,\n callbacks=cbs\n )\n\n base_mlp_preds = base_mlp.predict(test_ds)\n mlp_prs.append(average_precision_score(test['isFraud'], base_mlp_preds))", "_____no_output_____" ], [ "from lightgbm import LGBMClassifier\n\ntrain_tree_X = pd.concat([X_train_labelled, 
X_val_labelled])\ntrain_tree_y = pd.concat([y_train_labelled, y_val_labelled])\n\nrf_prs = []\nfor i in tqdm(range(10)):\n rf = RandomForestClassifier(max_depth=4)\n rf.fit(train_tree_X.values, train_tree_y)\n rf_preds = rf.predict_proba(test[X_train_labelled.columns])\n rf_prs.append(average_precision_score(test['isFraud'], rf_preds[:, 1]))", "100%|██████████| 10/10 [00:07<00:00, 1.28it/s]\n" ], [ "metrics_df = pd.DataFrame({\"MLP\": mlp_prs,\n \"VIME\": vime_prs,\n \"RF\": rf_prs})\n\nmetrics_df.boxplot()\nplt.ylabel(\"PR AUC\")\nplt.show()", "_____no_output_____" ], [ "metrics_df.describe()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
4aa523d961bcb3cc5402207ab21c0a7549e40dc1
89,697
ipynb
Jupyter Notebook
RN_Sort_of_Clevr.ipynb
kritiksoman/Relation-Network
7dfc14968ed1fa7480c22e72343ca6642773b1ad
[ "MIT" ]
null
null
null
RN_Sort_of_Clevr.ipynb
kritiksoman/Relation-Network
7dfc14968ed1fa7480c22e72343ca6642773b1ad
[ "MIT" ]
null
null
null
RN_Sort_of_Clevr.ipynb
kritiksoman/Relation-Network
7dfc14968ed1fa7480c22e72343ca6642773b1ad
[ "MIT" ]
null
null
null
69.104006
4,874
0.703636
[ [ [ "from google.colab import drive\ndrive.mount('/content/gdrive')\n%cd gdrive/My\\ Drive/Colab\\ Notebooks/neural\\ project\\ new/", "_____no_output_____" ], [ "# import functions\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom torch.autograd import Variable\nimport os\nimport pickle\nimport random\nfrom sklearn.model_selection import train_test_split\nimport matplotlib.pyplot as plt\nimport cv2\n", "_____no_output_____" ] ], [ [ "# **Dataset Generation**", "_____no_output_____" ] ], [ [ "train_objSize = 9800\ntest_objSize = 200\nimSize = 75 # size of image\nobjSize = 5 #size of objects in image \nqSize = 11 #6 for one-hot vector of color, 2 for question type, 3 for question subtype\nnQuestion = 10 # Answer : [yes, no, rectangle, circle, r, g, b, o, k, y]\ndatadir = './data' # directory to store training data\ncolors = [(0,0,255),(0,255,0),(255,0,0),(0,156,255),(128,128,128),(0,255,255)]#r,g,b,o,k,y\n\ntry:\n os.makedirs(datadir)\nexcept:\n print('Data directory already exists.')\n \ndef createCenter(objects):\n while True:\n pas = True\n center = np.random.randint(objSize, imSize - objSize, 2) \n if len(objects) > 0:# if an object is already present, then find center that is 2X away for all old centers\n for name,c,shape in objects:\n if ((center - c) ** 2).sum() < ((objSize * 2) ** 2):\n pas = False\n if pas:# if no old object is present, then no need to check anything\n return center\n\ndef putObjs():# function create 1 image and list of location,color,shape of 6 objects\n objects = []\n img = np.ones((imSize,imSize,3)) * 255 #plain white BG image\n for color_id,color in enumerate(colors): # put object of every color \n center = createCenter(objects)\n # take 50% of objects as circles and 50% as rectangles\n if random.random()<0.5:\n start = (center[0]-objSize, center[1]-objSize)#xmin,ymin\n end = (center[0]+objSize, center[1]+objSize)#xmax,ymax\n cv2.rectangle(img, start, end, 
color, -1)\n objects.append((color_id,center,'r'))\n else:\n center_ = (center[0], center[1])\n cv2.circle(img, center_, objSize, color, -1)\n objects.append((color_id,center,'c'))\n return objects,img\n \ndef genOneVQA():# function will create 20 QA for 1 image\n objects,img = putObjs()\n\n rel_questions = []\n norel_questions = []\n rel_answers = []\n norel_answers = []\n \n # 10 Non-relational questions\n for idx in range(nQuestion):\n question = np.zeros((qSize))\n color = random.randint(0,5)\n question[color] = 1\n question[6] = 1\n subtype = random.randint(0,2)\n question[subtype+8] = 1\n norel_questions.append(question)\n if subtype == 0:\n \"\"\"query shape->rectangle/circle\"\"\"\n if objects[color][2] == 'r':\n answer = 2\n else:\n answer = 3\n\n elif subtype == 1:\n \"\"\"query horizontal position->yes/no\"\"\"\n if objects[color][1][0] < imSize / 2:\n answer = 0\n else:\n answer = 1\n\n elif subtype == 2:\n \"\"\"query vertical position->yes/no\"\"\"\n if objects[color][1][1] < imSize / 2:\n answer = 0\n else:\n answer = 1\n norel_answers.append(answer)\n \n # 10 Relational questions\n for idx in range(nQuestion):\n question = np.zeros((qSize))\n color = random.randint(0,5)\n question[color] = 1\n question[7] = 1\n subtype = random.randint(0,2)\n question[subtype+8] = 1\n rel_questions.append(question)\n\n if subtype == 0:\n \"\"\"closest-to->rectangle/circle\"\"\"\n my_obj = objects[color][1]\n dist_list = [((my_obj - obj[1]) ** 2).sum() for obj in objects]\n dist_list[dist_list.index(0)] = 999\n closest = dist_list.index(min(dist_list))\n if objects[closest][2] == 'r':\n answer = 2\n else:\n answer = 3\n \n elif subtype == 1:\n \"\"\"furthest-from->rectangle/circle\"\"\"\n my_obj = objects[color][1]\n dist_list = [((my_obj - obj[1]) ** 2).sum() for obj in objects]\n furthest = dist_list.index(max(dist_list))\n if objects[furthest][2] == 'r':\n answer = 2\n else:\n answer = 3\n\n elif subtype == 2:\n \"\"\"count->1~6\"\"\"\n my_obj = 
objects[color][2]\n count = -1\n for obj in objects:\n if obj[2] == my_obj:\n count +=1 \n answer = count+4\n\n rel_answers.append(answer)\n\n relations = (rel_questions, rel_answers)\n norelations = (norel_questions, norel_answers)\n \n img = img/255. #normalize image\n dataset = (img, relations, norelations)\n return dataset\n\n\nprint('Building train and test datasets.')\ntest_datasets = [genOneVQA() for _ in range(test_size)]\ntrain_datasets = [genOneVQA() for _ in range(train_size)]\n\n \nfilename = os.path.join(datadir,'sort-of-clevr.pickle')\nwith open(filename, 'wb') as f:\n pickle.dump((train_datasets, test_datasets), f)\nprint('Dataset saved.')", "Building train and test datasets.\nDataset saved.\n" ], [ "# !rm -rf data", "_____no_output_____" ] ], [ [ "# **Standard Functions**", "_____no_output_____" ] ], [ [ "def tensor_data(data, i):\n img = torch.from_numpy(np.asarray(data[0][bs*i:bs*(i+1)]))\n qst = torch.from_numpy(np.asarray(data[1][bs*i:bs*(i+1)]))\n ans = torch.from_numpy(np.asarray(data[2][bs*i:bs*(i+1)]))\n\n input_img.data.resize_(img.size()).copy_(img)\n input_qst.data.resize_(qst.size()).copy_(qst)\n label.data.resize_(ans.size()).copy_(ans)\n\n\ndef cvt_data_axis(data):\n img = [e[0] for e in data]\n qst = [e[1] for e in data]\n ans = [e[2] for e in data]\n return (img,qst,ans)\n\n \ndef train(epoch, rel, norel):\n model.train() \n random.shuffle(rel)\n random.shuffle(norel)\n\n rel = cvt_data_axis(rel)\n norel = cvt_data_axis(norel)\n\n for batch_idx in range(len(rel[0]) // bs):\n tensor_data(rel, batch_idx)\n accuracy_rel = model.train_(input_img, input_qst, label)\n\n tensor_data(norel, batch_idx)\n accuracy_norel = model.train_(input_img, input_qst, label)\n\n if batch_idx % args.log_interval == 0:\n print('Train Epoch '+ str(epoch) +' : Relations accuracy: ' + str(accuracy_rel.numpy())+'% : Non Relations accuracy: ' + str(accuracy_norel.numpy())+'%') \n\ndef test(testFlag, rel, norel):\n model.eval()\n \n rel = cvt_data_axis(rel)\n 
norel = cvt_data_axis(norel)\n\n accuracy_rels = []\n accuracy_norels = []\n for batch_idx in range(len(rel[0]) // bs):\n tensor_data(rel, batch_idx)\n accuracy_rels.append(model.test_(input_img, input_qst, label)[1])\n\n tensor_data(norel, batch_idx)\n accuracy_norels.append(model.test_(input_img, input_qst, label)[1])\n\n accuracy_rel = sum(accuracy_rels) / len(accuracy_rels)\n accuracy_norel = sum(accuracy_norels) / len(accuracy_norels)\n print('\\n '+ testFlag + ' : Relations accuracy: ' + str(accuracy_rel.numpy())+'% : Non Relations accuracy: ' + str(accuracy_norel.numpy())+'%') \n\n", "_____no_output_____" ] ], [ [ "# **RN Model**", "_____no_output_____" ] ], [ [ "# function for finding coordinates of visual words\ndef findCoords(i):\n if i>=0 and i<=4:\n tmp=[0,i]\n elif i>=5 and i<=9:\n tmp=[1,i%5]\n elif i>=10 and i<=14:\n tmp=[2,i%5]\n elif i>=15 and i<=19:\n tmp=[3,i%5]\n elif i>=20 and i<=24:\n tmp=[4,i%5]\n return list((np.array(tmp)/2)-1)\n\n# cnn model \nclass ConvInputModel(nn.Module):\n def __init__(self):\n super(ConvInputModel, self).__init__() \n self.conv1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=3, stride=2, padding=1)#input channel,output channel,kernel size\n self.batchNorm1 = nn.BatchNorm2d(32)\n self.conv2 = nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, stride=2, padding=1)\n self.batchNorm2 = nn.BatchNorm2d(64)\n self.conv3 = nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, stride=2, padding=1)\n self.batchNorm3 = nn.BatchNorm2d(128)\n self.conv4 = nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, stride=2, padding=1)\n self.batchNorm4 = nn.BatchNorm2d(256)\n \n def forward(self, in_img):\n x = self.conv1(in_img)\n x = F.relu(x)\n x = self.batchNorm1(x)\n x = self.conv2(x)\n x = F.relu(x)\n x = self.batchNorm2(x)\n x = self.conv3(x)\n x = F.relu(x)\n x = self.batchNorm3(x)\n x = self.conv4(x)\n x = F.relu(x)\n x = self.batchNorm4(x)\n return x\n\n# f_phi model \nclass 
FCOutputModel(nn.Module):\n def __init__(self):\n super(FCOutputModel, self).__init__()\n# self.fc = nn.Linear(2000, 1000)\n self.fc2 = nn.Linear(1000, 500)\n self.fc3 = nn.Linear(500, 10)\n\n def forward(self, x):\n# x = self.fc(x)\n# x = F.relu(x)\n x = self.fc2(x)\n x = F.relu(x)\n x = F.dropout(x)\n x = self.fc3(x)\n return F.log_softmax(x,dim=1)\n\n# base model\nclass BasicModel(nn.Module):\n def __init__(self, args, name):\n super(BasicModel, self).__init__()\n self.name=name\n\n def train_(self, input_img, input_qst, label): #model.train() is predefined, so i created train_\n self.optimizer.zero_grad()\n output = self(input_img, input_qst)\n loss = F.nll_loss(output, label)\n loss.backward()\n self.optimizer.step()\n pred = output.data.max(1)[1]\n correct = pred.eq(label.data).cpu().sum()\n accuracy = correct * 100. / len(label)\n return accuracy\n \n def test_(self, input_img, input_qst, label):\n output = self(input_img, input_qst)\n pred = output.data.max(1)[1]\n correct = pred.eq(label.data).cpu().sum()\n accuracy = correct * 100. 
/ len(label)\n return pred,accuracy\n\n def save_model(self, epoch):\n torch.save(self.state_dict(), 'model/epoch_{}_{:02d}.pth'.format(self.name, epoch))\n\n\n# RN model \nclass RN(BasicModel):\n def __init__(self, args):\n super(RN, self).__init__(args, 'RN')\n \n self.conv = ConvInputModel()\n \n # g_theta\n ##(number of filters per object+coordinate of object)*2+question vector\n self.g_fc1 = nn.Linear((256+2)*2+11, 2000)\n self.g_fc2 = nn.Linear(2000, 2000)\n self.g_fc3 = nn.Linear(2000, 2000)\n self.g_fc4 = nn.Linear(2000, 2000)\n \n self.coordinates = torch.FloatTensor(args.batch_size, 25, 2)\n if args.cuda:\n self.coordinates = self.coordinates.cuda()\n self.coordinates = Variable(self.coordinates)\n np_coordinates = np.zeros((args.batch_size, 25, 2))\n for i in range(25):\n np_coordinates[:,i,:] = np.array( findCoords(i) )\n self.coordinates.data.copy_(torch.from_numpy(np_coordinates))\n\n # f_phi\n self.f_fc1 = nn.Linear(2000, 1000)\n self.fcout = FCOutputModel()\n \n # optimiser\n self.optimizer = optim.Adam(self.parameters(), lr=args.lr)\n\n\n def forward(self, img, q):\n x = self.conv(img) ## x = (bs x 256 x 5 x 5) \n \n bs = x.size()[0]\n c = x.size()[1]\n d = x.size()[2]\n x = x.view(bs,c,d*d).permute(0,2,1)#(bs x 25 x 256)\n \n # add coordinates to all the visual words\n x = torch.cat([x, self.coordinates],2)#(bs x 25 x 258)\n \n # repeat question as many times as the no. 
of objects\n q = torch.unsqueeze(q, 1)\n q = q.repeat(1,25,1)\n q = torch.unsqueeze(q, 2)\n \n # combine all object pairs with questions\n o1 = torch.unsqueeze(x,1) # (bsx1x25x258)\n o1 = o1.repeat(1,25,1,1) # (bsx25x25x258)\n o2 = torch.unsqueeze(x,2) # (bsx25x1x258)\n o2 = torch.cat([o2,q],3)# (bsx25x1x(258+11))\n o2 = o2.repeat(1,1,25,1) # (bsx25x25x(258+11))\n \n # concatenate all together\n x = torch.cat([o1,o2],3) # (bsx25x25x(258+258+11))\n \n # reshape for passing through network\n x = x.view(bs*d*d*d*d,527) #527=258X2+11\n x = F.relu(self.g_fc1(x))\n x = F.relu(self.g_fc2(x))\n x = F.relu(self.g_fc3(x))\n x = F.relu(self.g_fc4(x))\n \n # sum: polling to introduce order invariance amoung objects\n x = x.view(bs,d*d*d*d,2000)\n x = x.sum(1).squeeze()\n \n # f_phi\n x = F.relu(self.f_fc1(x))\n \n return self.fcout(x)\n", "_____no_output_____" ] ], [ [ "# **Training**", "_____no_output_____" ] ], [ [ "# Load data\ndef load_data():\n dirs = './data'\n filename = os.path.join(dirs,'sort-of-clevr.pickle')\n with open(filename, 'rb') as f:\n train_datasets, test_datasets = pickle.load(f)\n rel_tmp = []\n rel_test = []\n norel_tmp = []\n norel_test = []\n \n for img, relations, norelations in train_datasets:\n img = np.swapaxes(img,0,2)\n for qst,ans in zip(relations[0], relations[1]):\n rel_tmp.append((img,qst,ans))\n for qst,ans in zip(norelations[0], norelations[1]):\n norel_tmp.append((img,qst,ans))\n\n for img, relations, norelations in test_datasets:\n img = np.swapaxes(img,0,2)\n for qst,ans in zip(relations[0], relations[1]):\n rel_test.append((img,qst,ans))\n for qst,ans in zip(norelations[0], norelations[1]):\n norel_test.append((img,qst,ans))\n \n #use 10% of training data as validation\n rel_train, rel_val = train_test_split(rel_tmp, test_size=0.10, random_state=42)\n norel_train, norel_val = train_test_split(norel_tmp, test_size=0.10, random_state=42)\n \n return (rel_train, rel_test, rel_val, norel_val, norel_train, norel_test)\n \n\nrel_train, 
rel_test, rel_val, norel_val, norel_train, norel_test = load_data()\nprint('Data loaded.') ", "Data loaded.\n" ], [ "# create model object\nfrom argparse import Namespace\n# resumeFlag=None\nresumeFlag='final_epoch_RN_03.pth'\nbs=64\nmydict={'batch_size':bs,'cuda':True,'epochs':20,'log_interval':10,'lr':0.0001,'model':'RN','no_cuda':False,'resume':resumeFlag,'seed':1}\nargs = Namespace(**mydict)\nmodel=RN(args)\n# Detect if we have a GPU available\ndevice = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\n", "_____no_output_____" ], [ "model", "_____no_output_____" ], [ "# Training the model and validation in batches\nmodel_dirs = './model'\nbs = args.batch_size\ninput_img = torch.FloatTensor(bs, 3, 75, 75)\ninput_qst = torch.FloatTensor(bs, 11)\nlabel = torch.LongTensor(bs)\n\nif args.cuda:\n model.cuda()\n input_img = input_img.cuda()\n input_qst = input_qst.cuda()\n label = label.cuda()\n\ninput_img = Variable(input_img)\ninput_qst = Variable(input_qst)\nlabel = Variable(label)\n\ntry:\n os.makedirs(model_dirs)\nexcept:\n print('Model directory already exists.')\n\nif args.resume:\n filename = os.path.join(model_dirs, args.resume)\n if os.path.isfile(filename):\n checkpoint = torch.load(filename)\n model.load_state_dict(checkpoint)\n print('Checkpoint '+filename+' loaded.')\n\nfor epoch in range(1, args.epochs + 1):\n train(epoch, rel_train, norel_train)\n test('Validation', rel_val, norel_val)\n model.save_model(epoch)", "_____no_output_____" ] ], [ [ "# **Testing**", "_____no_output_____" ] ], [ [ "from argparse import Namespace\nmodel_dirs = './model'\n\n# resumeFlag=None\nresumeFlag='final_epoch_RN_03.pth'#has 92% test acc.\nbs=64\n# bs=80\nmydict={'batch_size':bs,'cuda':True,'epochs':20,'log_interval':10,'lr':0.0001,'model':'RN','no_cuda':False,'resume':resumeFlag,'seed':1}\nargs = Namespace(**mydict)\nmodel=RN(args)\n# Detect if we have a GPU available\ndevice = torch.device(\"cuda:0\" if 
torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\n\nif args.resume:\n filename = os.path.join(model_dirs, args.resume)\n if os.path.isfile(filename):\n checkpoint = torch.load(filename)\n model.load_state_dict(checkpoint)\n print('Checkpoint '+filename+' loaded.')\n \ntest('Test', rel_test, norel_test)", "Checkpoint ./model/final_epoch_RN_03.pth loaded.\n\n Test : Relations accuracy: 92% : Non Relations accuracy: 99%\n" ] ], [ [ "# **Result Visualization**", "_____no_output_____" ] ], [ [ "#Load the data\ndirs = './data'\nfilename = os.path.join(dirs,'sort-of-clevr.pickle')\nwith open(filename, 'rb') as f:\n train_datasets, test_datasets = pickle.load(f)\nrel_train = []\nrel_test = []\nnorel_train = []\nnorel_test = []\n\nfor img, relations, norelations in train_datasets:\n img = np.swapaxes(img,0,2)\n for qst,ans in zip(relations[0], relations[1]):\n rel_train.append((img,qst,ans))\n for qst,ans in zip(norelations[0], norelations[1]):\n norel_train.append((img,qst,ans))\n\nfor img, relations, norelations in test_datasets:\n img = np.swapaxes(img,0,2)\n for qst,ans in zip(relations[0], relations[1]):\n rel_test.append((img,qst,ans))\n for qst,ans in zip(norelations[0], norelations[1]):\n norel_test.append((img,qst,ans))", "_____no_output_____" ], [ "from argparse import Namespace\nresumeFlag='final_epoch_RN_03.pth'#has 92% test acc.\nmydict={'batch_size':64,'cuda':True,'epochs':20,'log_interval':10,'lr':0.0001,'model':'RN','no_cuda':False,'resume':resumeFlag,'seed':1}\nargs = Namespace(**mydict)\nmodel=RN(args)\nmodel.load_state_dict(torch.load('model/final_epoch_RN_03.pth'))\nmodel.eval();", "_____no_output_____" ], [ "# function plotting and visualizing results\ncolors = ['red ', 'green ', 'blue ', 'orange ', 'gray ', 'yellow ']\nanswer_sheet = ['yes', 'no', 'rectangle', 'circle', '1', '2', '3', '4', '5', '6']\n\ndef plot_RN_result(idx):\n img2 = np.swapaxes(input_img[idx].cpu().detach().numpy(),0,2)\n 
plt.imshow(np.dstack((img2[:,:,2],img2[:,:,1],img2[:,:,0])));\n plt.grid(False)\n question=input_qst[idx]\n if question[6] == 1:\n query = 'Q: The object with color '\n query += colors[question.tolist()[0:6].index(1)]+', ' \n if question[8] == 1:\n query += 'has what shape?'\n if question[9] == 1:\n query += 'is towards the left?'\n if question[10] == 1:\n query += 'is towards the top?'\n if question[7] == 1:\n query = 'Q: For the object with color '\n query += colors[question.tolist()[0:6].index(1)]+', ' \n if question[8] == 1:\n query += 'the shape of the closest object is?'\n if question[9] == 1:\n query += 'the shape of the furthest object is??'\n if question[10] == 1:\n query += 'the number of objects having the same shape is?'\n print(query) \n print('A: ',answer_sheet[accuracy_rels[0][idx]],' (Predicted)')\n print('A:' ,answer_sheet[label[idx]],' (Desired)')\n", "_____no_output_____" ] ], [ [ " ## *Relational*", "_____no_output_____" ] ], [ [ "epoch=0\nbs=64\ninput_img = torch.FloatTensor(bs, 3, 75, 75)\ninput_qst = torch.FloatTensor(bs, 11)\nlabel = torch.LongTensor(bs)\n\nmodel.cuda()\ninput_img = input_img.cuda()\ninput_qst = input_qst.cuda()\nlabel = label.cuda()\n\naccuracy_rels = []\nrel_test2 = cvt_data_axis(rel_test)\nbatch_idx=1\n# for batch_idx in range(len(rel_test2[0]) // bs):\ntensor_data(rel_test2, batch_idx);\naccuracy_rels.append(model.test_(input_img, input_qst, label)[0]);\n", "_____no_output_____" ], [ "idx=4#index of img,question,answer\nplot_RN_result(idx)", "Q: For the object with color green , the shape of the closest object is?\nA: circle (Predicted)\nA: circle (Desired)\n" ], [ "idx=30#index of img,question,answer\nplot_RN_result(idx)", "Q: For the object with color blue , the number of objects having the same shape is?\nA: 4 (Predicted)\nA: 4 (Desired)\n" ], [ "idx=45#index of img,question,answer\nplot_RN_result(idx)", "Q: For the object with color green , the shape of the furthest object is??\nA: rectangle (Predicted)\nA: rectangle 
(Desired)\n" ], [ "idx=63#index of img,question,answer\nplot_RN_result(idx) #XXXXXXX", "Q: For the object with color green , the shape of the furthest object is??\nA: circle (Predicted)\nA: circle (Desired)\n" ], [ "idx=34#index of img,question,answer\nplot_RN_result(idx)", "Q: For the object with color orange , the shape of the furthest object is??\nA: circle (Predicted)\nA: circle (Desired)\n" ] ], [ [ " ## *Non-Relational*", "_____no_output_____" ] ], [ [ "epoch=0\nbs=64\ninput_img = torch.FloatTensor(bs, 3, 75, 75)\ninput_qst = torch.FloatTensor(bs, 11)\nlabel = torch.LongTensor(bs)\n\nmodel.cuda()\ninput_img = input_img.cuda()\ninput_qst = input_qst.cuda()\nlabel = label.cuda()\n\naccuracy_rels = []\nnorel_test2 = cvt_data_axis(norel_test)\nbatch_idx=1\n# for batch_idx in range(len(rel_test2[0]) // bs):\ntensor_data(norel_test2, batch_idx);\naccuracy_rels.append(model.test_(input_img, input_qst, label)[0]);\n", "_____no_output_____" ], [ "idx=4#index of img,question,answer\nplot_RN_result(idx)", "Q: The object with color yellow , is towards the top?\nA: yes (Predicted)\nA: yes (Desired)\n" ], [ "idx=20#index of img,question,answer\nplot_RN_result(idx)", "Q: The object with color gray , is towards the left?\nA: no (Predicted)\nA: no (Desired)\n" ], [ "idx=52#index of img,question,answer\nplot_RN_result(idx)", "Q: The object with color green , has what shape?\nA: rectangle (Predicted)\nA: rectangle (Desired)\n" ], [ "idx=33#index of img,question,answer\nplot_RN_result(idx)", "Q: The object with color gray , has what shape?\nA: circle (Predicted)\nA: circle (Desired)\n" ], [ "idx=40#index of img,question,answer\nplot_RN_result(idx)", "Q: The object with color red , has what shape?\nA: rectangle (Predicted)\nA: rectangle (Desired)\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ] ]
4aa52af70e7c527a93bc9664b7135687b56c8b0f
60,326
ipynb
Jupyter Notebook
Error Analysis.ipynb
AndreFCruz/han-attention-plot
24cf052e14e09b659db450a554a819f57300cd54
[ "MIT" ]
3
2020-05-27T08:46:58.000Z
2020-07-18T13:44:59.000Z
Error Analysis.ipynb
AndreFCruz/han-attention-plot
24cf052e14e09b659db450a554a819f57300cd54
[ "MIT" ]
null
null
null
Error Analysis.ipynb
AndreFCruz/han-attention-plot
24cf052e14e09b659db450a554a819f57300cd54
[ "MIT" ]
null
null
null
150.064677
28,108
0.888191
[ [ [ "JSON_PATH = 'by-article-train_attn-data.json'", "_____no_output_____" ], [ "from json import JSONDecoder\ndata = JSONDecoder().decode(open(JSON_PATH).read())", "_____no_output_____" ], [ "word = 'Sponsored'\nhyper_count = dict()\nmain_count = dict()\n\nfor i, article in enumerate(data):\n if word in article['normalizedText'][-1]:\n energies = [e for w, e in article['activations'][-1][0] if w == word]\n if article['hyperpartisan'] == 'true':\n hyper_count[i] = {\n 'energies': energies,\n 'truth': article['hyperpartisan'],\n 'prediction': article['prediction'],\n 'pred_value': article['pred_value'],\n 'last_sent_e': article['activations'][-1][-1],\n }\n elif article['hyperpartisan'] == 'false':\n main_count[i] = {\n 'energies': energies,\n 'truth': article['hyperpartisan'],\n 'prediction': article['prediction'],\n 'pred_value': article['pred_value'],\n 'last_sent_e': article['activations'][-1][-1],\n }\n else:\n raise RuntimeError('json format invalid')", "_____no_output_____" ], [ "# Average word energy of 1st 'Sponsored' tag\navg_final_e = [el['energies'][0] * el['last_sent_e'] for el in hyper_count.values()]\nprint('AVG:', sum(avg_final_e) / len(avg_final_e))\navg_final_e", "AVG: 0.6635681491087156\n" ], [ "# Average final energy of 1st 'Sponsored' tag (word_e * sentence_e)\navg_final_e = [el['energies'][0] * el['last_sent_e'] for el in hyper_count.values()]\nprint('AVG:', sum(avg_final_e) / len(avg_final_e))\navg_final_e", "AVG: 0.6635681491087156\n" ], [ "### ### ###", "_____no_output_____" ], [ "hyper_articles = [el for el in data if el['hyperpartisan'] == 'true']\nmain_articles = [el for el in data if el['hyperpartisan'] == 'false']\nassert len(hyper_articles) + len(main_articles) == len(data)", "_____no_output_____" ], [ "hyper_sent_att = [activ[-1] for a in hyper_articles for activ in a['activations']]\nmain_sent_att = [activ[-1] for a in main_articles for activ in a['activations']]", "_____no_output_____" ], [ "import seaborn as sns\nimport 
matplotlib.pyplot as plt\nsns.distplot(hyper_sent_att, hist=False, rug=False, label=\"hyperpartisan\")\nsns.distplot(main_sent_att, hist=False, rug=False, label=\"mainstream\")\n\nplt.gcf().savefig('imgs/sentence_energy_distribution.png', dpi=400)\nplt.show()", "_____no_output_____" ], [ "## Describe distribution\nfrom scipy import stats\nprint('Hyperpartisan Sentence Energy distribution:')\nprint(stats.describe(hyper_sent_att), end='\\n\\n')\n\nprint('Mainstream Sentence Energy distribution:')\nprint(stats.describe(main_sent_att), end='\\n\\n')", "Hyperpartisan Sentence Energy distribution:\nDescribeResult(nobs=7519, minmax=(2.3341956058453867e-20, 0.9999939203262329), mean=0.03127569871829999, variance=0.018976125018488714, skewness=5.418113652206876, kurtosis=30.10368002392932)\n\nMainstream Sentence Energy distribution:\nDescribeResult(nobs=7906, minmax=(1.918610192922653e-17, 0.9999979734420776), mean=0.05102500207225129, variance=0.03304226684127542, skewness=4.101548782503461, kurtosis=16.13745577230267)\n\n" ], [ "## Average attention on most important sentence\nhyper_most_imp_sent = [max(activ[-1] for activ in a['activations']) for a in hyper_articles]\nmain_most_imp_sent = [max(activ[-1] for activ in a['activations']) for a in main_articles]", "_____no_output_____" ], [ "print('Avg Hyperpartisan:', sum(hyper_most_imp_sent) / len(hyper_most_imp_sent))\nprint('Avg Mainstream:', sum(main_most_imp_sent) / len(main_most_imp_sent))", "Avg Hyperpartisan: 0.7294364526930741\nAvg Mainstream: 0.7810985228133729\n" ], [ "sns.distplot(hyper_most_imp_sent, hist=False, rug=False, label=\"hyperpartisan\")\nsns.distplot(main_most_imp_sent, hist=False, rug=False, label=\"mainstream\")\n\nplt.gcf().savefig('imgs/most_important_sentence_energy_distribution.png', dpi=400)\nplt.show()", "_____no_output_____" ], [ "## Number of sentences with attention above a given threshold of importance\nTHRESHOLD = 0.3\nhyper_important_sentences = [sum(1 for activ in a['activations'] if 
activ[-1] > THRESHOLD) for a in hyper_articles]\nmain_important_sentences = [sum(1 for activ in a['activations'] if activ[-1] > THRESHOLD) for a in main_articles]", "_____no_output_____" ], [ "print('Average number of sentences above {}:'.format(THRESHOLD))\nprint('\\thyperpartisan: {}'.format(sum(hyper_important_sentences) / len(hyper_important_sentences)))\nprint('\\tmainstream: {}'.format(sum(main_important_sentences) / len(main_important_sentences)))", "Average number of sentences above 0.3:\n\thyperpartisan: 1.1265822784810127\n\tmainstream: 1.1105651105651106\n" ], [ "### ### ###", "_____no_output_____" ], [ "## Calculating statistical significance that the two distributions are distinct\n## Welch's t-test: https://en.wikipedia.org/wiki/Welch%27s_t-test\nt_val, p_val = stats.ttest_ind(hyper_sent_att, main_sent_att, equal_var=False)\nprint('p-value for the hypothesis that the two distributions have equal mean:', p_val)", "p-value for the hypothesis that the two distributions have equal mean: 2.529547738438885e-14\n" ], [ "## Statistical significance of hypothesis:\n## attention of most important sentence of a mainstream article is larger than that of a hyperpartisan article\nfrom statsmodels.stats import weightstats as stests\n_, p_val = stests.ztest(hyper_most_imp_sent, main_most_imp_sent, value=0)\nprint(p_val)", "0.0028554798903885777\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa54e7403076480d1152cc98a826cab7455aff5
54,318
ipynb
Jupyter Notebook
examples/notebooks/profiling.ipynb
rdelosreyes/myctapipe
dad0784b60de986d5ee871e7b61a951e948998d6
[ "BSD-3-Clause" ]
null
null
null
examples/notebooks/profiling.ipynb
rdelosreyes/myctapipe
dad0784b60de986d5ee871e7b61a951e948998d6
[ "BSD-3-Clause" ]
null
null
null
examples/notebooks/profiling.ipynb
rdelosreyes/myctapipe
dad0784b60de986d5ee871e7b61a951e948998d6
[ "BSD-3-Clause" ]
null
null
null
352.714286
51,014
0.928753
[ [ [ "# Profiling code\n\nThere are lots of ways to profile code to identify speed bottlenecks in python\n\n* cProfile\n* line_profiler\n* pycallgraph (https://pycallgraph.readthedocs.org)\n\nHere is an example with pycallgraph:\n(if you don'thave the module, run `pip install --user pycallgraph`)", "_____no_output_____" ] ], [ [ "from pycallgraph import PyCallGraph\nfrom pycallgraph.output import GraphvizOutput", "_____no_output_____" ], [ "import astropy.units as u\nfrom astropy.time import Time\nfrom astropy.coordinates import SkyCoord, Angle\nfrom ctapipe.coordinates import CameraFrame, TelescopeFrame\nfrom ctapipe.reco import hillas_parameters\nimport numpy as np\n\ndef test_camera_telescope_transform():\n x = np.linspace(-1,1,100)\n y = np.linspace(-1,1,100)\n \n camera_coord = CameraFrame(x=x*u.m, y=y*u.m, z=0*u.m)\n telescope_coord = camera_coord.transform_to(TelescopeFrame)\n camera_coord2 = telescope_coord.transform_to(CameraFrame)\n \n im = np.random.uniform(size=x.shape)\n hillas = hillas_parameters(x,y,im)", "_____no_output_____" ], [ "\n# just wrap any code to test in this \"with\" statement:\nwith PyCallGraph(output=GraphvizOutput()):\n test_camera_telescope_transform()", "_____no_output_____" ] ], [ [ "the output by default is just a PNG file:", "_____no_output_____" ] ], [ [ "!ls *.png", "pycallgraph.png\r\n" ], [ "from IPython.display import Image\nImage(filename='pycallgraph.png') ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa55b09d12a0b9e34696d4a9dd7697e47eec6fb
13,560
ipynb
Jupyter Notebook
Notebooks/KITTI_VAL.ipynb
Malga-Vision/fastervideo
34e212fe04398262e5dac7f74ac4c2365fc1e03f
[ "Apache-2.0" ]
1
2021-05-20T08:11:53.000Z
2021-05-20T08:11:53.000Z
Notebooks/KITTI_VAL.ipynb
Malga-Vision/fastervideo
34e212fe04398262e5dac7f74ac4c2365fc1e03f
[ "Apache-2.0" ]
null
null
null
Notebooks/KITTI_VAL.ipynb
Malga-Vision/fastervideo
34e212fe04398262e5dac7f74ac4c2365fc1e03f
[ "Apache-2.0" ]
null
null
null
36.648649
184
0.5191
[ [ [ "import detectron2\nfrom detectron2.utils.logger import setup_logger\nsetup_logger()\n\nimport numpy as np\n\nimport random\n\n\nfrom detectron2.engine import DefaultPredictor\nfrom detectron2.config import get_cfg\nfrom detectron2.utils.visualizer import Visualizer\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.modeling import build_model\nfrom detectron2.evaluation import COCOEvaluator,PascalVOCDetectionEvaluator\nimport matplotlib.pyplot as plt\nimport torch.tensor as tensor\nfrom detectron2.data import build_detection_test_loader\nfrom detectron2.evaluation import inference_on_dataset\nimport torch\nfrom detectron2.structures.instances import Instances\nfrom detectron2.modeling import build_model\nfrom detectron2.modeling.meta_arch.tracker import Tracker\nfrom detectron2.modeling.meta_arch.soft_tracker import SoftTracker\n%matplotlib inline\n", "_____no_output_____" ] ], [ [ "\n\n\n\n## Loading Weights", "_____no_output_____" ] ], [ [ "cfg = get_cfg()\n\ncfg.merge_from_file(\"../configs/COCO-Detection/faster_rcnn_R_50_FPN_3x_Video.yaml\")\n\n\ncfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.4 # set threshold for this model\ncfg.MODEL.WEIGHTS = '/media/DATA/Users/Issa/models_pub/kitti_jde.pth'\n#cfg.MODEL.WEIGHTS = \"../models_pub/kitti_jde.pth\"\n\n\nprint(cfg.MODEL)\n", "_____no_output_____" ] ], [ [ "## functions to validate annotated data using devkit_tracking from KITTI", "_____no_output_____" ] ], [ [ "from contextlib import contextmanager\nimport sys, os\n@contextmanager\ndef suppress_stdout():\n with open(os.devnull, \"w\") as devnull:\n old_stdout = sys.stdout\n sys.stdout = devnull\n try: \n yield\n finally:\n sys.stdout = old_stdout\ndef print_val_results(results_name):\n\n with suppress_stdout():\n print(\"Now you don't\")\n os.system('python2 /home/issa/devkit_tracking/python/validate_tracking.py val')\n \n \n labels = {1:'MOTA',2:'MOTP',3:'MOTAL',4:'MODA',5:'MODP',7:'R',8:'P',12:'MT',13:'PT',14:'ML',18:'FP',19:'FN',22:'IDs'}\n 
summary_heading = 'Metric\\t'\n for label in labels.keys():\n summary_heading+=labels[label] + '\\t'\n summary_cars = 'Cars\\t'\n summary_peds = 'Peds\\t'\n with open('/home/issa/devkit_tracking/python/results/'+results_name+'/summary_car.txt') as f:\n i=0\n for line in f:\n if(i==0):\n i+=1\n continue\n if(i in labels.keys()):\n summary_cars+= str(round(float(line[len(line)-9:len(line)-1].strip()),2))+'\\t'\n i+=1\n\n\n \n print(summary_heading)\n print(summary_cars)\n \ndef print_test_results(results_name):\n\n #with suppress_stdout():\n print(\"Now you don't\")\n os.system('python2 ../devkit_tracking/python/evaluate_tracking.py test')\n \n \n labels = {1:'MOTA',2:'MOTP',3:'MOTAL',4:'MODA',5:'MODP',7:'R',8:'P',12:'MT',13:'PT',14:'ML',18:'FP',19:'FN',22:'IDs'}\n summary_heading = 'Metric\\t'\n for label in labels.keys():\n summary_heading+=labels[label] + '\\t'\n summary_cars = 'Cars\\t'\n summary_peds = 'Peds\\t'\n with open('../devkit_tracking/python/results/'+results_name+'/summary_car.txt') as f:\n i=0\n for line in f:\n if(i==0):\n i+=1\n continue\n if(i in labels.keys()):\n summary_cars+= str(round(float(line[len(line)-9:len(line)-1].strip()),2))+'\\t'\n i+=1\n\n\n \n print(summary_heading)\n print(summary_cars)\n \n\n \n\n", "_____no_output_____" ] ], [ [ "## Inference : Joint Detection and Tracking", "_____no_output_____" ] ], [ [ "import json\nimport os\nimport cv2 as cv2\nimport time\nfrom tqdm.notebook import tqdm\ncolors = [[0,0,128],[0,255,0],[0,0,255],[255,0,0],[0,128,128],[128,0,128],[128,128,0],[255,255,0],[0,255,255],[255,255,0],[128,0,0],[0,128,0]\n ,[0,128,255],[0,255,128],[255,0,128],[128,255,0],[255,128,0],[128,255,255],[128,0,255],[128,128,128],[128,255,128]]\n#dirC = '/../datasets/KITTI/tracking/data_tracking_image_2/training/image_02/'\ndirC = '/media/DATA/Datasets/KITTI/tracking/data_tracking_image_2/training/image_02/'\n#dirDets = '../datasets/KITTI/tracking/data_tracking_det_2_lsvm/training/det_02/'\nnames = []\narr = {2:'Car'}\n\nif(not 
os.path.exists(\"../results\")):\n os.mkdir('../results')\n os.mkdir('../results/KITTI')\nelse:\n if(not os.path.exists(\"../results/KITTI\")):\n os.mkdir('../results/KITTI')\noutput_path = '/home/issa/devkit_tracking/python/results'\nsettings = [\n \n dict(props=20, #number of proposals to use by rpn\n st=1.05, #acceptance distance percentage for soft tracker\n sup_fp = True, # fp suppression based on Intersection over Union for new detections\n alpha = 0.6, # the percentage of the new embedding in track embedding update (emb = alpha * emb(t) +(1-alpha) emb(t-1)) \n fp_thresh=0.95, # iou threshold above which the new detection is considered a fp\n T=True, #use past tracks as proposals\n D='cosine', # distance metric for embeddings\n Re=True, #use the embedding head \n A=True, # use appearance information\n K=True, # use kalman for motion prediction\n E=False, #use raw FPN features as appearance descriptors\n measurement=0.001, #measruement noise for the kalman filter\n process=1, #process noise for the kalman filter\n dist_thresh=1.5, # the normalization factor for the appearance distance\n track_life=7, #frames for which a track is kept in memory without an update\n track_vis=2, #frames for which a track is displayed without an update\n \n ),\n \n \n\n]\ntrain_folders = ['0000','0002','0003','0004','0005','0009','0011','0017','0020']\nval_folders = ['0001','0006','0008','0016','0018','0019']\ntest_folders = ['0014','0015','0016','0018','0019','0001','0006','0008','0010','0012','0013']\nsubmission_folders = ['0000','0001','0002','0003','0004','0005','0006','0007',\n '0008','0009','0010','0011','0012','0013','0014','0015','0016','0017',\n '0018','0019','0020','0021','0022','0023','0024','0025','0026','0027','0028']\nfinal_test_folders = ['0014']\nfor setting in settings:\n test_name = 'val'\n exp_name = output_path+ '/'+test_name\n \n if(not os.path.exists(exp_name)):\n os.mkdir(exp_name)\n os.mkdir(exp_name+'/data')\n avg=0\n for folder_name in val_folders:\n dets 
= {}\n public_det=False\n if public_det==True:\n with open(dirDets+folder_name+'.txt') as det_file:\n for line in det_file:\n parts = line.split(' ')\n\n if(parts[0] not in dets):\n dets[parts[0]] = []\n if(parts[2] =='Car' and float(parts[17])>-1):\n\n dets[parts[0]].append([float(parts[6])\n ,float(parts[7]),float(parts[8]) \n ,float(parts[9]),float(parts[6]),float(parts[17])])\n\n predictor = DefaultPredictor(cfg,True)\n predictor.model.tracker = Tracker()\n predictor.model.tracking_proposals = setting['T']\n predictor.model.tracker.track_life = setting['track_life']\n predictor.model.tracker.track_visibility = setting['track_vis']\n predictor.model.tracker.use_appearance = setting['A']\n predictor.model.tracker.use_kalman = setting['K']\n predictor.model.tracker.embed = setting['E']\n predictor.model.tracker.reid = setting['Re']\n predictor.model.tracker.dist = setting['D']\n predictor.model.tracker.measurement_noise=setting['measurement']\n predictor.model.tracker.process_noise = setting['process']\n predictor.model.tracker.dist_thresh = setting['dist_thresh']\n predictor.model.use_reid = setting['Re']\n predictor.model.tracker.soft_thresh = setting['st']\n predictor.model.tracker.suppress_fp = setting['sup_fp']\n predictor.model.tracker.fp_thresh = setting['fp_thresh']\n predictor.model.tracker.embed_alpha = setting['alpha']\n max_distance = 0.2\n \n \n \n \n output_file = open('%s/data/%s.txt'%(exp_name,folder_name),'w')\n \n frames = {}\n frame_counter = 0\n prev_path = 0\n elapsed = 0\n predictor.model.prev_path = 0\n for photo_name in sorted(os.listdir(dirC+folder_name+'/')):\n frames[frame_counter] = {}\n img_path = dirC+folder_name+'/'+photo_name\n img = cv2.imread(img_path)\n inp = {}\n inp['width'] = img.shape[1]\n inp['height'] = img.shape[0]\n\n \n\n inp['file_name'] = photo_name\n inp['image_id'] = photo_name\n \n predictor.model.photo_name = img_path\n start = time.time()\n outputs = predictor(img,setting['props'])\n end = time.time()\n elapsed 
+=(end-start)\n \n for i in outputs:\n \n \n if(i.pred_class in arr):\n output_file.write(\"%d %d %s 0 0 -0.20 %d %d %d %d 1.89 0.48 1.20 1.84 1.47 8.41 0.01 %f\\n\"%(frame_counter\n ,i.track_id,arr[i.pred_class],i.xmin,i.ymin,i.xmax,i.ymax,i.conf))\n \n frame_counter +=1\n predictor.model.prev_path = img_path\n \n avg += (frame_counter/elapsed)\n \n output_file.close()\n \n print(setting)\n print('avg_time :',avg/len(val_folders))\n print_val_results(test_name)\n\n ", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa562fce5a5bed9bd3a278cc80cde684904301b
1,677
ipynb
Jupyter Notebook
.ipynb_checkpoints/NYC_Citibike_Challenge-checkpoint.ipynb
jzebker/bikesharing
8b6aa2554f74a1ecf39dc16b362b27ca8bef3393
[ "MIT" ]
null
null
null
.ipynb_checkpoints/NYC_Citibike_Challenge-checkpoint.ipynb
jzebker/bikesharing
8b6aa2554f74a1ecf39dc16b362b27ca8bef3393
[ "MIT" ]
null
null
null
.ipynb_checkpoints/NYC_Citibike_Challenge-checkpoint.ipynb
jzebker/bikesharing
8b6aa2554f74a1ecf39dc16b362b27ca8bef3393
[ "MIT" ]
null
null
null
18.842697
72
0.523554
[ [ [ "import pandas as pd", "_____no_output_____" ], [ "# 1. Create a DataFrame for the 201908-citibike-tripdata data. \ntripdata = pd.read_csv(\"201908-citibike-tripdata.csv\")\nprint(tripdata)", "_____no_output_____" ], [ "# 2. Check the datatypes of your columns. \n", "_____no_output_____" ], [ "# 3. Convert the 'tripduration' column to datetime datatype.\n", "_____no_output_____" ], [ "# 4. Check the datatypes of your columns. \n", "_____no_output_____" ], [ "# 5. Export the Dataframe as a new CSV file without the index.\n", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
4aa56ca390235450e372382d9543a913a2fb0957
88,689
ipynb
Jupyter Notebook
genre by playlist.ipynb
gearmonkey/genre_playlist_analysis
2b172235b1f6a4d5925835a4a0f953ddcb76223d
[ "MIT" ]
null
null
null
genre by playlist.ipynb
gearmonkey/genre_playlist_analysis
2b172235b1f6a4d5925835a4a0f953ddcb76223d
[ "MIT" ]
null
null
null
genre by playlist.ipynb
gearmonkey/genre_playlist_analysis
2b172235b1f6a4d5925835a4a0f953ddcb76223d
[ "MIT" ]
null
null
null
80.261538
45,773
0.73486
[ [ [ " # setup\n importing stuff and such", "_____no_output_____" ] ], [ [ "import spotipy\nimport matplotlib\nimport numpy as np\n%matplotlib notebook\nfrom matplotlib import pylab as plt\nfrom matplotlib import mlab\nsp = spotipy.Spotify()", "_____no_output_____" ] ], [ [ "# fetch all the playlist details that have 'punk' in the name\n(note that this doesn't get the track lists, we'll do that a bit later)", "_____no_output_____" ] ], [ [ "results = sp.search(type='playlist', q='punk', limit=50)['playlists']\nprint \"gathering details about\", results['total'], \"playlists\"\npunk_playlists = results['items']\nwhile results['next']:\n results = sp.next(results)['playlists']\n punk_playlists += results['items']\n", "gathering details about 5351 playlists\n" ] ], [ [ "# basic stats\nto get a feel for the dataset, let's to do some basic stats before we plow ahead with the track analysis\n## title length\nwe expect a peak a 4 characters (the minimal 'Punk'), what else happens?", "_____no_output_____" ] ], [ [ "print \"number of results:\", len(punk_playlists)\n\nprint \ntitle_lengths = filter(lambda c:c<100, map(lambda pl:len(pl['name']), punk_playlists))\nn, bins, patches = plt.hist(title_lengths, 50, normed=1, facecolor='green', alpha=0.75)\nmu = np.mean(title_lengths)\nsigma = np.std(title_lengths)\n# add a 'best fit' line\ny = mlab.normpdf( bins, mu, sigma)\nl = plt.plot(bins, y, 'r--', linewidth=1)\n\nplt.xlabel('Number of Characters')\nplt.ylabel('Probability')\nplt.title(r'$\\mathrm{Histogram\\ of\\ Punk playlist title lengths:}\\ \\mu='+str(mu)+',\\ \\sigma='+str(sigma)+'$')\n# plt.axis([40, 160, 0, 0.03])\nplt.grid(True)\n\n\n\n", "number of results: 5351\n\n" ] ], [ [ "ok, so _a_ peak where expected, but the vast majority are longer, mean is just over 16 characters.\n\n---\n\n## word counts in the titles\n\nSo picking that apart a little more, let's take a look at some lightly cleaned word counts across all the titles", "_____no_output_____" ] ], [ [ "from 
collections import Counter\nfrom string import punctuation\nstopwords = \"and of the or in\".split()\nprint \"top words in titles\"\nword_count = Counter()\nfor pl in punk_playlists:\n word_count.update([w.strip(punctuation) for w in pl['name'].lower().split() if w not in stopwords and len(w) > 2])\nword_count.most_common(10)", "top words in titles\n" ] ], [ [ "remember friends: Daft Punk may be playing in your house, your house, but it's a pretty good guess that when 'punk' is proceeded by 'daft' its probably not actually a punk playlist...\n\nthe other results here are basically the expected neighbouring genres (e.g. 'pop', 'rock', 'metal') and of course some self labelling ('playlist')\n\nsmall aside, this seems to indicate that some of the playlists don't mention 'punk' in the name. Is that a problem? (this makes me wonder how the search algorithm works...). Let's see how many there are and what they look like.\n\n### That's not punk.\n\n", "_____no_output_____" ] ], [ [ "print len([pl['name'] for pl in punk_playlists if \"punk\" not in pl['name'].lower()]), \"of the search results don't say punk.\\n here they are:\"\nprint '\\n'.join([pl['name'] for pl in punk_playlists if \"punk\" not in pl['name'].lower()])", "87 of the search results don't say punk.\n here they are:\nPünk\nWorkout Shout!\nFrench Touch\nPost-Hardcore Crash Course\n💰🔪☠\nPost Garage Wave Revival\n.01\nNew Rock\n🌚 cool songs 🌝\nIs It New Wave?\nmoosic\nLocos x los 2010\nTeenage Dirtbag\nSkunk Rock\nCrossfit Hutto Rock\nmath rock !\nDance Anthems - Ministry of Sound\nHits 2013\nFrom The Garage\nCheap Beer & Dirty Basements\nProper Naughty\nCanadians Rock!\nEverything Rock\nrandom\nTHE DEFINITIVE 70s\ncurrent songs\nArctic Monkeys: Origens\nEmo/Emoish/Indierock\nRock 2\n70's ROCK - The Ultimate Playlist\nPØP PÛNK\nMain Playlist\nGod tier 2.0\nTOPSIFY 80s HITS\nfor when its cold outside\nAll Things Post\nNOW 85\nDie 257ers Party Playlist\nBare tunes\npoleng's\nSweat Through This\nTank\nHighschool 
Hits\nVidar\nQuote Songs🖊💘\nBest of '80s Indie • Alternative eighties\nGRUNGE FOR LIFE\nMain Playlist\nDe Boa na Praia\nMi playlist \nToday's Best Boy Bands Piano\nMixed Bag\nIdk.\nSunday Breakfast\nFrench Touch\nGood. \nOriginals vs Covers + Sampled\nEMO\ncool tunes \nHard. Heavy. Loud.\nLounge & Warm-up\n\"Satans Musik\"\nDon't Call It An Emo Revival\nHard Rock y Rock 70'\nfavourite shit\nFiltr - Playlist MARIAGE\nFiltr I LOVE 10s\nSWR3 Rock Hits\nSummer Hits\nTerrorgruppe - Maximilian Playlist\nWaning Light Mix\nalernative music\nCROSSING all OVER! ► by rock.de\nThis Is Rewind - 90s\nMarvin\nDiscover: Dischord Records\nFiltr - Playlist FRIDAY NIGHT\n♥ (by tumblr user macleod)\nCollege Rock Playlist | 80s Campus Radio / Alternative / Indie / Grunge | Feat. Sonic Youth, Weezer, Pavement, R.E.M. & more\nTHE CLASH - 50 CLASSICS\nEMO 101\nElectrospective / Dance Focus: 1988-'97\nAlexa Chung's Picks\nBest of Lou Reed\nPlaylist do Rick - Dead Fish\nUp Songs\nBuild My Throne\n" ] ], [ [ "ok so there's 87 and many of which mention neighbouring genres, and a handful are only not matching 'punk' because they use some latin-1 misspellings (e.g. 'Pünk', 'PØP PÛNK')\n\nI'm going to just go with the full search results, as the base dataset, filtering ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
4aa57948381c2b835818e1d93e213507604b5f27
45,593
ipynb
Jupyter Notebook
intermediate/ML_Med_nonlin.ipynb
virati/Med_ML
d2c51db67b17f8873f30c9ba0032409f8326c220
[ "MIT" ]
null
null
null
intermediate/ML_Med_nonlin.ipynb
virati/Med_ML
d2c51db67b17f8873f30c9ba0032409f8326c220
[ "MIT" ]
null
null
null
intermediate/ML_Med_nonlin.ipynb
virati/Med_ML
d2c51db67b17f8873f30c9ba0032409f8326c220
[ "MIT" ]
1
2020-02-17T21:26:15.000Z
2020-02-17T21:26:15.000Z
152.48495
18,308
0.892834
[ [ [ "# Machine learning for medicine\n## Linear measures of non-linear things\n\n## Overview\nIn this notebook we're going to address a major limitation of correlations and linear regressions in data analysis.\n\n## Code Setup", "_____no_output_____" ] ], [ [ "import numpy as np\nimport scipy\nimport matplotlib.pyplot as plt\nfrom ipywidgets import interact, interactive, fixed, interact_manual\nimport ipywidgets as widgets\nimport scipy.stats as stats", "_____no_output_____" ] ], [ [ "<a id='test'></a>\n## What is a nonlinear relationship?\n\nLinear relationships between variables are really nice.\nIt's easy to draw a line, it's easy to explain.\n\nThere are many things around us in our daily lives that *don't* behave linearly.\nThink about this: is there anything you do that wouldn't just be doubly-good if you doubled the effort you put it?\nFor example, would you get from home->work in half the time if you pressed the gas pedal twice as hard?\n\nA nonlinear relationship is what it sounds like: you can't *reasonable* draw a line between two variables.\n", "_____no_output_____" ] ], [ [ "x = np.linspace(-10,10,1000)\n\nlin_f = -2 * x\nnonlin_f = (x-3)*(x+3)*x\n\nplt.figure()\n#plt.plot(x,lin_f,'--')\nplt.plot(x,nonlin_f,color='orange')\nplt.ylim((-50,50));", "_____no_output_____" ] ], [ [ "The orange line represents a nonlinear relationship that's more complicated.\nYou can't just multiply x by a number and get that curve.\nThe actual equation for the orange curve is $y = (x-3)(x+3)x = x^3 - 9x$.\n\nAnother example is $y = (x-3)(x+3)x^2 = x^4 - 9x^2$.\n", "_____no_output_____" ] ], [ [ "nonlin_f = (x-3)*(x+3)*x*x\n\nplt.figure()\n#plt.plot(x,lin_f,'--')\nplt.plot(x,nonlin_f,color='orange')\nplt.ylim((-50,50));", "_____no_output_____" ] ], [ [ "A last example is something that we find very useful in science/engineering: a $\\sin$ function.\n$y = \\sin(x)$", "_____no_output_____" ] ], [ [ "nonlin_f = 
45*np.sin(x)\n\nplt.figure()\n#plt.plot(x,lin_f,'--')\nplt.plot(x,nonlin_f,color='orange')\nplt.ylim((-50,50));", "_____no_output_____" ] ], [ [ "All of these relationships are *nonlinear* but we're lucky because we can *see that clearly*.\nThings can get more complicated when we look at this from a simulated experiment.", "_____no_output_____" ] ], [ [ "def nonlin(noise,samples,do_stats=False):\n truth = lambda x: 3 * (x-2) * (x+2) * x * x\n x = np.random.uniform(-5,5,size=(samples,))\n y = np.random.normal(truth(x),noise,size=x.shape)\n x_c = np.linspace(-5,5,100)\n\n plt.figure()\n plt.scatter(x,y)\n plt.xlim((-5,5))\n plt.ylim((-30,30))\n plt.plot(x_c,truth(x_c),'--',alpha=0.5)\n \n if do_stats:\n pears = stats.pearsonr(x,y)\n spears = stats.spearmanr(x,y)\n plt.title('Correlation: ' + str(pears[0]) + ' p-val: ' + str(pears[1]))\n plt.plot(x_c,pears[0] * x_c)\n \ninteract(nonlin,noise=(0.0,10.0,1.0),samples=(0,200,10),do_stats=fixed(False));\n", "_____no_output_____" ] ], [ [ "## Correlation in a non-linear relationship\n\nLet's take a look at what happens if we just find the correlation between two variables that are non-linearly related.", "_____no_output_____" ] ], [ [ "interact(nonlin,noise=fixed(0.0),samples=fixed(0.0),do_stats=fixed(False));\n", "_____no_output_____" ] ], [ [ "This shows us a major, major problem: the p-value is not significant.\nIn other words, the probability of us seeing this data given x and y are *not* related is about ~80%.\nBut then you re-run the code and it changes drastically.", "_____no_output_____" ] ], [ [ "interact(nonlin,noise=(0.0,10.0,0.5),samples=fixed(100));", "_____no_output_____" ] ], [ [ "## Where linear is \"good enough\"\nTo finish out this discussion we're going to demonstrate that even if the \"line\" is wrong, it may be *useful*.\n\nLet's revisit the example from our [first section](#test).\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
4aa57e66542d17feb79cf949d0d80d3f0b98bcf9
273,873
ipynb
Jupyter Notebook
notebooks/score_and_features.ipynb
gbelouze/mva-time-series
536ba8ecf37ebb8ea7434878da84fae4528c6620
[ "BSD-2-Clause" ]
null
null
null
notebooks/score_and_features.ipynb
gbelouze/mva-time-series
536ba8ecf37ebb8ea7434878da84fae4528c6620
[ "BSD-2-Clause" ]
null
null
null
notebooks/score_and_features.ipynb
gbelouze/mva-time-series
536ba8ecf37ebb8ea7434878da84fae4528c6620
[ "BSD-2-Clause" ]
null
null
null
251.029331
134,576
0.887291
[ [ [ "from anomaly import io, tmm, adm\nfrom sklearn.metrics import f1_score\nimport scipy\nimport pandas as pd\nimport numpy as np\n\nimport anomaly.utils.modelselect_utils as mu\nimport anomaly.utils.statsutils as su\nimport matplotlib.pyplot as plt\nimport seaborn as sns", "_____no_output_____" ] ], [ [ "## The pipeline ", "_____no_output_____" ], [ "We demonstrate below how the anomaly detection pipeline is used", "_____no_output_____" ] ], [ [ "predictor = tmm.ARMA()\ndetector = adm.KSigma()\n\nbench = io.BenchmarkDataset(2)\ndf = bench.read(8)\ndf.head()", "_____no_output_____" ], [ "ts = df.value\n\npredictor.fit(np.array(ts))\nts_predicted = predictor.predict()\nresiduals = ts_predicted - ts\n\ndetector.fit(ts, ts_predicted)\nis_anomaly = detector.detect()", "_____no_output_____" ], [ "fig, axs = plt.subplots(1, 2, figsize=(15, 5))\n\nplt.sca(axs[0])\nplt.plot(ts, label=\"Original data\")\nplt.plot(ts_predicted, label=\"Predicted data\")\nplt.legend()\n\nplt.sca(axs[1])\nplt.plot(residuals, label=\"Residuals\")\nplt.plot(residuals[df.is_anomaly == 1], linestyle=\"\", marker=\"x\", color=\"green\", label=\"True anomaly\")\nplt.plot(residuals[is_anomaly], linestyle=\"\", marker=\"+\", color=\"red\", label=\"Detected anomaly\")\nplt.legend()\n\nplt.show()", "_____no_output_____" ] ], [ [ "Here we can observe a failure of the method. ARMA fits the data too closely, and almost becomes a naive predictor which predicts $\\hat{s}_t = s_{t-1}$. As a consequence, each down peak in the residual is followed by an up peak because the predictor has lagged the outlier. Of course, this can be fixed by choosing a predictors which handles well trend and seasonality.", "_____no_output_____" ], [ "### Do the residuals follow a gaussian distribution ?", "_____no_output_____" ] ], [ [ "plt.hist(residuals, bins=50)\nplt.show()", "_____no_output_____" ] ], [ [ "Here, the gaussian assumption seems ok from far away. 
However, this is not always the case, and more importantly, it is not gaussian in the statistical sense.", "_____no_output_____" ] ], [ [ "_, pvalue = scipy.stats.normaltest(residuals)\nprint(pvalue)", "3.7140666983958233e-155\n" ] ], [ [ "## Experiments", "_____no_output_____" ] ], [ [ "predictor_dict = {\n \"naive_predictor\" : tmm.NaivePredictor(),\n \"ar5_predictor\" : tmm.AR(order=5),\n \"ma5_predictor\" : tmm.MA(order=5),\n \"arma55_predictor\" : tmm.ARMA(order_ar=5, order_ma=5),\n \"arima525_predictor\" : tmm.ARIMA.ARIMA(order=[5,2,5]),\n \"poly5_predictor\" : tmm.Polynomial(degree=5),\n \"trigonometric\": tmm.Trigonometric(),\n \"poly+arma\": tmm.Sequential(predictors=[tmm.Polynomial(), tmm.ARMA()]),\n \"poly+trigo\":tmm.Sequential(predictors=[tmm.Polynomial(), tmm.Trigonometric()]),\n \"poly+trigo+arma\": tmm.Sequential(predictors=[tmm.Polynomial(), tmm.Trigonometric(), tmm.ARMA()]),\n}", "_____no_output_____" ] ], [ [ "### Compute the features and the scores of the models for each time series in the benchmark", "_____no_output_____" ], [ "⚠️ Don't run those cells, the results are already saved ! 
⚠️", "_____no_output_____" ], [ "Compute features:", "_____no_output_____" ] ], [ [ "%%capture --no-stdout\nassert False, \"Don't run this cell unless you want to recompute all features\"\nfor benchmark_index in range(1,3):\n bench = io.BenchmarkDataset(benchmark_index)\n \n features = mu.compute_benchmark_features(bench)\n features.to_csv(f\"saved_data/features_{benchmark_index}.csv\", index_label=\"ts_index\")", "_____no_output_____" ] ], [ [ "Compute scores:", "_____no_output_____" ] ], [ [ "%%capture --no-stdout\n\nassert False, \"Don't run this cell unless you want to recompute all scores (long!)\"\nfor benchmark_index in range(1,3):\n bench = io.BenchmarkDataset(benchmark_index)\n\n score_dict = mu.compute_predictor_scores(predictor_dict, bench, detector=adm.KSigma())\n score_df = pd.concat([score_dict[model_name].assign(model_name=model_name)\n for model_name in score_dict.keys()])\n \n score_df.to_csv(f\"saved_data/score_df_{benchmark_index}.csv\", index_label=\"ts_index\")", "100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 67/67 [15:57<00:00, 14.30s/it]\n100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 100/100 [25:40<00:00, 15.40s/it]\n" ] ], [ [ "### Analyse the results", "_____no_output_____" ], [ "#### Read the saved data", "_____no_output_____" ] ], [ [ "benchmark_index = 1", "_____no_output_____" ], [ "score_df = pd.read_csv(f\"saved_data/score_df_{benchmark_index}.csv\").set_index(\"ts_index\")\nfeatures = pd.read_csv(f\"saved_data/features_{benchmark_index}.csv\").set_index(\"ts_index\")", "_____no_output_____" ], [ "features_normalized = (features - features.mean()) / features.std()\nfeatures_normalized", "_____no_output_____" ], [ "score_df", "_____no_output_____" ], [ "def 
get_features(benchmark_index):\n features = pd.read_csv(f\"saved_data/features_{benchmark_index}.csv\").set_index(\"ts_index\")\n features_normalized = (features - features.mean()) / features.std()\n return features_normalized.to_numpy()\n\ndef get_best_model(benchmark_index):\n score_df = pd.read_csv(f\"saved_data/score_df_{benchmark_index}.csv\").set_index(\"ts_index\")\n pivot_init = score_df[[\"f1\", \"model_name\"]]\n df = pd.pivot_table(pivot_init, index=\"ts_index\", columns=\"model_name\", values=\"f1\")\n df_np = df.to_numpy()\n return df_np.argmax(axis=1)", "_____no_output_____" ] ], [ [ "#### Plot the results depending on the features using PCA", "_____no_output_____" ] ], [ [ "from sklearn import decomposition\n\nfeatures_np = get_features(benchmark_index=1)\nbest_model = get_best_model(benchmark_index=1)\n\npca = decomposition.PCA(n_components=2)\nXY = pca.fit_transform(features_np)", "_____no_output_____" ], [ "plot_df = pd.DataFrame(XY, columns=[\"X\", \"Y\"])\nplot_df[\"category\"] = np.array(list(predictor_dict.keys()))[best_model]\n\ngroups = plot_df.groupby(\"category\")\nfor name, group in groups:\n plt.plot(group[\"X\"], group[\"Y\"], marker=\"o\", linestyle=\"\", label=name)\nplt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\nplt.title(\"Best model depending on the time-series features, as shown using PCA\")\n\nplt.savefig(\"figs/best_model_PCA.png\", bbox_inches = 'tight')", "_____no_output_____" ] ], [ [ "### See how each feature affects the fscore using a linear regression", "_____no_output_____" ] ], [ [ "import statsmodels.formula.api as smf\n\nfor predictor_name in [\"poly5_predictor\", \"ma5_predictor\"]:\n print(f\"------------------------- {predictor_name} --------------------------------\")\n score_model_df = score_df[score_df[\"model_name\"] == predictor_name].reset_index(drop=True)\n \n df = features_normalized.join(score_model_df, on=\"ts_index\")\n \n smresults = smf.ols('f1 ~ trend_score + seasonality_score + nonlinearity + 
skew + kurtosis + hurst + lyapunov', df).fit()\n smresults_robust = smresults.get_robustcov_results()\n print(smresults_robust.summary())", "------------------------- poly5_predictor --------------------------------\n OLS Regression Results \n==============================================================================\nDep. Variable: f1 R-squared: 0.332\nModel: OLS Adj. R-squared: 0.252\nMethod: Least Squares F-statistic: 27.51\nDate: Wed, 30 Mar 2022 Prob (F-statistic): 2.27e-16\nTime: 23:05:05 Log-Likelihood: -8.2682\nNo. Observations: 67 AIC: 32.54\nDf Residuals: 59 BIC: 50.17\nDf Model: 7 \nCovariance Type: HC1 \n=====================================================================================\n coef std err t P>|t| [0.025 0.975]\n-------------------------------------------------------------------------------------\nIntercept 0.4847 0.036 13.600 0.000 0.413 0.556\ntrend_score -0.0527 0.043 -1.224 0.226 -0.139 0.033\nseasonality_score 0.0330 0.030 1.088 0.281 -0.028 0.094\nnonlinearity -0.0648 0.008 -7.657 0.000 -0.082 -0.048\nskew -0.1641 0.121 -1.355 0.181 -0.407 0.078\nkurtosis 0.2123 0.103 2.067 0.043 0.007 0.418\nhurst 0.1379 0.040 3.438 0.001 0.058 0.218\nlyapunov 0.0927 0.033 2.784 0.007 0.026 0.159\n==============================================================================\nOmnibus: 3.390 Durbin-Watson: 2.121\nProb(Omnibus): 0.184 Jarque-Bera (JB): 2.154\nSkew: 0.225 Prob(JB): 0.341\nKurtosis: 2.246 Cond. No. 6.74\n==============================================================================\n\nNotes:\n[1] Standard Errors are heteroscedasticity robust (HC1)\n------------------------- ma5_predictor --------------------------------\n OLS Regression Results \n==============================================================================\nDep. Variable: f1 R-squared: 0.340\nModel: OLS Adj. R-squared: 0.261\nMethod: Least Squares F-statistic: 10.88\nDate: Wed, 30 Mar 2022 Prob (F-statistic): 1.01e-08\nTime: 23:05:05 Log-Likelihood: 12.996\nNo. 
Observations: 67 AIC: -9.991\nDf Residuals: 59 BIC: 7.646\nDf Model: 7 \nCovariance Type: HC1 \n=====================================================================================\n coef std err t P>|t| [0.025 0.975]\n-------------------------------------------------------------------------------------\nIntercept 0.3891 0.026 14.995 0.000 0.337 0.441\ntrend_score -0.0467 0.029 -1.596 0.116 -0.105 0.012\nseasonality_score 0.0319 0.017 1.838 0.071 -0.003 0.067\nnonlinearity -0.0354 0.009 -4.141 0.000 -0.052 -0.018\nskew 0.0122 0.115 0.107 0.915 -0.217 0.242\nkurtosis 0.0575 0.095 0.607 0.546 -0.132 0.247\nhurst 0.0843 0.039 2.174 0.034 0.007 0.162\nlyapunov 0.0402 0.030 1.334 0.187 -0.020 0.100\n==============================================================================\nOmnibus: 11.191 Durbin-Watson: 1.995\nProb(Omnibus): 0.004 Jarque-Bera (JB): 3.618\nSkew: 0.196 Prob(JB): 0.164\nKurtosis: 1.931 Cond. No. 6.74\n==============================================================================\n\nNotes:\n[1] Standard Errors are heteroscedasticity robust (HC1)\n" ] ], [ [ "### Compare fscores of the pipeline", "_____no_output_____" ] ], [ [ "keep = [\n \"naive_predictor\",\n \"ar5_predictor\",\n \"poly+trigo\",\n \"poly+trigo+arma\",\n]\n\nfig, axes = plt.subplots(2,2, sharex=False, sharey=False, figsize=(20,7))\nfig.suptitle('Distributions of F1-scores and recall using three different predictors on the four datasets')\n\nscores = [\"f1\", \"recall\"]\nbenchmarks = [f\"benchmark_{i}\" for i in range(1,3)]\n\n\nfor j in range(len(benchmarks)):\n benchmark_index = j + 1\n score_df = pd.read_csv(f\"saved_data/score_df_{benchmark_index}.csv\").set_index(\"ts_index\")\n \n for i in range(len(scores)):\n for model_name in keep:\n score_df_to_plot = score_df[score_df.model_name == model_name].reset_index()\n sns.kdeplot(data=score_df_to_plot, x=scores[i],\n bw_adjust=.8, cut=0, ax=axes[i,j])\n \n axes[i,j].set_ylabel(\"\")\n axes[i,j].set_xlabel(f\"{scores[i]} on benchmark 
{benchmark_index}\")\n \nfrom matplotlib.lines import Line2D\ncustom_lines = [Line2D([0], [0], color=\"blue\", lw=4),\n Line2D([0], [0], color=\"orange\", lw=4),\n Line2D([0], [0], color=\"green\", lw=4),\n Line2D([0], [0], color=\"red\", lw=4)]\n\n\n\nfig.legend(custom_lines, keep, loc=\"center left\")\nplt.savefig(\"figs/F1_recall.png\", bbox_inches = 'tight')", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa58c1780d639b447c090f1f717abea78e0b8bb
35,312
ipynb
Jupyter Notebook
benchmark/.ipynb_checkpoints/locust-runner-checkpoint.ipynb
dmitryhd/aiohttp-vs-tornado-benchmark
8eb442f8e83bee8428d3233fdb70283e04c7f093
[ "MIT" ]
6
2018-10-08T08:29:56.000Z
2021-06-20T14:14:03.000Z
benchmark/.ipynb_checkpoints/locust-runner-checkpoint.ipynb
dmitryhd/aiohttp-vs-tornado-benchmark
8eb442f8e83bee8428d3233fdb70283e04c7f093
[ "MIT" ]
null
null
null
benchmark/.ipynb_checkpoints/locust-runner-checkpoint.ipynb
dmitryhd/aiohttp-vs-tornado-benchmark
8eb442f8e83bee8428d3233fdb70283e04c7f093
[ "MIT" ]
1
2019-02-26T07:37:02.000Z
2019-02-26T07:37:02.000Z
44.473552
300
0.593424
[ [ [ "- ./bin/sample_service.py # run service\n- locust -f benchmark/locustfile.py --host=http://127.0.0.1:8890 # run locust\n- open web ui http://127.0.0.1:8089/\n\ndocs at https://docs.locust.io/en/stable/quickstart.html", "_____no_output_____" ], [ "- `locust -f benchmark/sleep50_locust.py --master --host=http://127.0.0.1:8890`\n- `locust -f benchmark/sleep50_locust.py --slave --host=http://127.0.0.1:8890`", "_____no_output_____" ], [ "\ngo\n", "_____no_output_____" ], [ "```\nlocust -f benchmark/dummy.py --master --master-bind-host=127.0.0.1 --master-bind-port=5557\n\ncd benchmark/boomer\ngo build -o a.out http_benchmark.go\n\n./a.out --url=http://127.0.0.1:8890/sleep50 --master-port=5557 --rpc=zeromq\n```", "_____no_output_____" ], [ "https://docs.locust.io/en/latest/running-locust-without-web-ui.html", "_____no_output_____" ] ], [ [ "master_locust = 'locust -f dummy.py --master --master-bind-host=127.0.0.1 --master-bind-port=5557 --no-web -c 10 -r 10 -n 1000 --expect-slaves 1'\nslave_locust = './a.out --url=http://127.0.0.1:8890/sleep50 --master-port=5557 --rpc=zeromq'", "_____no_output_____" ], [ "csv_name = 'example'", "_____no_output_____" ], [ "master_locust += f' --csv={csv_name}'\nmaster_locust", "_____no_output_____" ], [ "slave_locust", "_____no_output_____" ], [ "import subprocess\nimport os", "_____no_output_____" ], [ "def run(command: str):\n p = subprocess.Popen(command.split(' '), stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n while True:\n retcode = p.poll() #returns None while subprocess is running\n if retcode is not None:\n print(f'retcode: {retcode}')\n print(command)\n print_process_output(p)\n break\n print_process_output(p)\n \ndef print_process_output(p):\n out = ''\n for l in p.stdout.readlines():\n if l:\n out += l.decode('utf-8') + '\\n'\n for l in p.stderr.readlines():\n if l:\n out += l.decode('utf-8') + '\\n'\n if out:\n print(out) ", "_____no_output_____" ], [ "run('go get github.com/myzhan/boomer')\nrun('go build -o a.out 
http_benchmark.go')", "retcode: 0\ngo get github.com/myzhan/boomer\nretcode: 0\ngo build -o a.out http_benchmark.go\n" ], [ "import threading\nimport time", "_____no_output_____" ], [ "master = threading.Thread(target=run, args=(master_locust,))\n#master.daemon = True\nslave = threading.Thread(target=run, args=(slave_locust,))\n#slave.daemon = True\nmaster.start()\ntime.sleep(0.5)\nslave.start()\n\nmaster.isAlive(), slave.isAlive()", "_____no_output_____" ], [ "master.isAlive(), slave.isAlive()", "_____no_output_____" ], [ "master.terminate()", "_____no_output_____" ], [ "!ls", "\u001b[31ma.out\u001b[m\u001b[m http_benchmark.go locust.ipynb\r\ndummy.py info_locust.py sleep50_locust.py\r\n" ], [ "master.", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa5905542833eaec14cdb88fe1c6cdc00d10a33
6,673
ipynb
Jupyter Notebook
4_6_Matrices_and_Transformation_of_State/5_matrix_transpose.ipynb
Abdulrahman-Adel/CVND_Localization_Exercises
b694ad09ec484ff46ee119cd649c07b24cac6cbd
[ "MIT" ]
null
null
null
4_6_Matrices_and_Transformation_of_State/5_matrix_transpose.ipynb
Abdulrahman-Adel/CVND_Localization_Exercises
b694ad09ec484ff46ee119cd649c07b24cac6cbd
[ "MIT" ]
null
null
null
4_6_Matrices_and_Transformation_of_State/5_matrix_transpose.ipynb
Abdulrahman-Adel/CVND_Localization_Exercises
b694ad09ec484ff46ee119cd649c07b24cac6cbd
[ "MIT" ]
null
null
null
36.464481
290
0.468155
[ [ [ "# Transpose of a Matrix\n\nIn this set of exercises, you will work with the transpose of a matrix.\n\nYour first task is to write a function that takes the transpose of a matrix. Think about how to use nested for loops efficiently.\n\nThe second task will be to write a new matrix multiplication function that takes advantage of your matrix transposition function.", "_____no_output_____" ] ], [ [ "### TODO: Write a function called transpose() that \n### takes in a matrix and outputs the transpose of the matrix\n\ndef transpose(matrix):\n matrix_transpose = []\n \n for i in range(len(matrix[0])):\n row = []\n for j in range(len(matrix)):\n row.append(matrix[j][i])\n matrix_transpose.append(row)\n \n return matrix_transpose", "_____no_output_____" ], [ "### TODO: Run the code in the cell below. If there is no \n### output, then your answers were as expected\n\nassert transpose([[5, 4, 1, 7], [2, 1, 3, 5]]) == [[5, 2], [4, 1], [1, 3], [7, 5]]\nassert transpose([[5]]) == [[5]]\nassert transpose([[5, 3, 2], [7, 1, 4], [1, 1, 2], [8, 9, 1]]) == [[5, 7, 1, 8], [3, 1, 1, 9], [2, 4, 2, 1]]\n", "_____no_output_____" ] ], [ [ "### Matrix Multiplication\n\nNow that you have your transpose function working, write a matrix multiplication function that takes advantage of the transpose. \n\nAs part of the matrix multiplication code, you might want to re-use your dot product function from the matrix multiplication exercises. 
But you won't need your get_row and get_column functions anymore because the tranpose essentially takes care of turning columns into row vectors.\n\nRemember that if matrix A is mxn and matrix B is nxp, then the resulting product will be mxp.", "_____no_output_____" ] ], [ [ "### TODO: Write a function called matrix_multiplication() that\n### takes in two matrices and outputs the product of the two\n### matrices\n\n### TODO: Copy your dot_product() function here so that you can\n### use it in your matrix_multiplication function\ndef dot_product(vectorA, vectorB):\n result = 0\n \n for i in range(len(vectorA)):\n result += vectorA[i] * vectorB[i]\n \n return result\n\n# Takes in two matrices and outputs the product of the two matrices\ndef matrix_multiplication(matrixA, matrixB):\n product = []\n \n ## TODO: Take the transpose of matrixB and store the result\n ## in a new variable\n \n \n ## TODO: Use a nested for loop to iterate through the rows\n ## of matrix A and the rows of the tranpose of matrix B\n \n ## TODO: Calculate the dot product between each row of matrix A\n ## with each row in the transpose of matrix B\n \n ## TODO: As you calculate the results inside your for loops,\n ## store the results in the product variable\n \n # Take the transpose of matrixB and store the result\n transposeB = transpose(matrixB)\n \n # Use a nested for loop to iterate through the rows\n # of matrix A and the rows of the tranpose of matrix B\n for r1 in range(len(matrixA)):\n new_row = []\n for r2 in range(len(transposeB)):\n # Calculate the dot product between each row of matrix A\n # with each row in the transpose of matrix B\n dp = dot_product(matrixA[r1], transposeB[r2])\n new_row.append(dp)\n # Store the results in the product variable\n product.append(new_row)\n\n ## TODO: \n return product ", "_____no_output_____" ], [ "### TODO: Run the code in the cell below. 
If there is no \n### output, then your answers were as expected\n\nassert matrix_multiplication([[5, 3, 1], \n [6, 2, 7]], \n [[4, 2], \n [8, 1], \n [7, 4]]) == [[51, 17], \n [89, 42]]\n\nassert matrix_multiplication([[5]], [[4]]) == [[20]]\n\nassert matrix_multiplication([[2, 8, 1, 2, 9],\n [7, 9, 1, 10, 5],\n [8, 4, 11, 98, 2],\n [5, 5, 4, 4, 1]], \n [[4], \n [2], \n [17], \n [80], \n [2]]) == [[219], [873], [8071], [420]]\n\n\nassert matrix_multiplication([[2, 8, 1, 2, 9],\n [7, 9, 1, 10, 5],\n [8, 4, 11, 98, 2],\n [5, 5, 4, 4, 1]], \n [[4, 1, 2], \n [2, 3, 1], \n [17, 8, 1], \n [1, 3, 0], \n [2, 1, 4]]) == [[61, 49, 49], [83, 77, 44], [329, 404, 39], [104, 65, 23]]", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa5982409047d0cc68da36b2a45bd8fe7baa498
17,150
ipynb
Jupyter Notebook
00.ipynb
kangwonlee/18pycpp-01
6ad9720951e53e44c2245ea26642b6a0bb38577a
[ "BSD-3-Clause" ]
null
null
null
00.ipynb
kangwonlee/18pycpp-01
6ad9720951e53e44c2245ea26642b6a0bb38577a
[ "BSD-3-Clause" ]
null
null
null
00.ipynb
kangwonlee/18pycpp-01
6ad9720951e53e44c2245ea26642b6a0bb38577a
[ "BSD-3-Clause" ]
null
null
null
42.555831
311
0.63207
[ [ [ "# Introducing `git` version control system", "_____no_output_____" ], [ "* A [version control](https://en.wikipedia.org/wiki/Version_control) system helps keeping track of changes in software source code.\n* With a version control system, trying and testing possibly risky attempts can be easier.\n* Currently in the late 2010s, [`git`](https://en.wikipedia.org/wiki/List_of_version_control_software) is one of the [available version control softwares](https://en.wikipedia.org/wiki/List_of_version_control_software), \n* Linus Torvalds created `git` in 2005 to maintain the Linux kernel.\n* `git` is an [open source](https://github.com/git/git) distributed version control system. A repository may have remote versions and local versions that are (practically) identical.\n\n", "_____no_output_____" ], [ "[![Git Data Transport Commands](https://images.osteele.com/2008/git-transport.png)](https://blog.osteele.com/2008/05/my-git-workflow/)", "_____no_output_____" ], [ "[[ref0](https://git-scm.com/book/en/v2), [ref1](https://github.com/progit)]", "_____no_output_____" ], [ "| command | expected behavior | example |\n|:-------:|:-----------------:|:-------:|\n| `init` | initialize a git repository | `git init` |\n| `clone` | clone a git repository | `git clone <repo url>`<br>`git clone file://<path>` |\n| `log` | list the commit history | `git log`<br>`git log --help`<br>`git log --stat`<br>`git log --oneline --graph --all` |\n| `status` | current status of a git repository | `git status` |\n| `diff` | visualize changes after last commit and/or staging | `git diff`<br>`git diff HEAD`<br>`git diff HEAD^` |\n| `config` | list or adjust configuration | `git config --list`<br>`git config --global --unset credential.helper` |\n| `config user.name` | specify the user's name | `git config user.name <your name>` |\n| `config user.email` | specify the user's email address | `git config user.email <your email>` |\n| `remote` | manage remote repositories | `git remote add origin 
<remote repo>` |\n| `add` | stage some change to commit | `git add <path to a changed file>`<br>`git add -p` |\n| `commit` | create an entry of change | `git commit`<br>`git commit -m <message>` |\n| `push` | upload the changes to a remote repository | `git push`<br>`git push -u origin <branch name>` |\n| `checkout ` | switch workspace to a certain commit | `git checkout <commit hash>`<br>`git checkout -b <new branch>`<br>`git checkout -- <file to undo>` |\n| [`branch`](https://git-scm.com/docs/git-branch) | manage branches | `git branch`<br>`git branch -r` |\n| `blame` | relates each line of code with commits | `git blame <file path>`|\n| [`rebase`](https://git-scm.com/docs/git-rebase) | move current branch on top of another branch | `git rebase <branch>`<br>`git rebase -i <commit>` |\n| [`merge`](https://git-scm.com/book/en/v2/Git-Branching-Basic-Branching-and-Merging) | merge another branch to the current branch | `git merge --no-ff <other branch>`|\n\n", "_____no_output_____" ], [ "# Practice", "_____no_output_____" ], [ "1. Go to the github website and log in.\n1. Go to one of the repositories of your interest.<br>For this example, this page would use Wes McKinney's [Python for Data Analysis](https://github.com/wesm/pydata-book).<br>\nIts repository addres is : https://github.com/wesm/pydata-book\n1. Let's try cloning the repository.<br>\n`git clone https://github.com/wesm/pydata-book`<br>\n1. Now try `cd pydata-book` and `ls -a` commands.\n<br>Note if a folder `.git` is visible.\n1. Enter `pwd` to check the current full path.<br>\nLet's assume the folder is : `/home/user/Documents/pydata-book`\n1. `git remote` would list of available remote repository names.\n1. `git remote get-url origin` would show the link to the `origin` repository.<br>If developers contribute to the [Python for Data Analysis](https://github.com/wesm/pydata-book), you would be able to update this repository using `git pull origin`.\n1. 
If your name and email address are \"ABC\" and [email protected] respectively, enter `git config user.name ABC` and `git config user.email [email protected]`.\n1. `git config --list` would show configurations of this repository.\n1. Try `echo \"test\" > test.txt` to create a sample text file.\n1. `git status` would show:\n<br>The current branch \n<br>Sync with the branch of the remote repository\n<br>One file that `git` is not trcking\n1. Enter `git add test.txt`\n1. `git status` would show:\n<br>Branch and sync information would not change\n<br>One file added to the stage (or index) to be committed\n1. Enter `git commit -m \"Added test.txt\"`<br>\n`git` would show messages.\n1. Check `git status`.\n1. Now `git log --state` would show the hash value, date & time, your name & email, commit message, and the file change of the commits.\n1. Open the new file using an editor : `vi test.txt` or `nano test.txt`.\n1. Add one more line, save, and exit the editor.\n1. `git status` would show one file is changed.\n1. `git diff` would show the changes in the files.\n1. `git add test.txt` and `git commit -m \"Changed test.txt\"` would commit the file.\n1. Check `git status` and `git log --stat`.\n1. `git log --oneline --graph --all` would show the commit tree.\n1. `git branch` would list local branch names.<br>Other repositories may have different branch names.\n1. `git branch --all` would show both local and remote branches.\n1. `ls`\n1. `git checkout 1st-edition` will activate branch `1st-edition`.<br>If it was not one of the local branches, `git` will create a new local branch.\n1. `ls` again and compare the content of the folder.\n1. `git checkout 2nd-edition` will switch to branch `2nd-edition`.<br>`ls` again to check.\n1. Enter `cd ..` to move up one level.\n1. Enter `git clone /home/user/Documents/pydata-book temp`.<br>`git` would clone anoter repository in folder `temp`. (This example is just to show cloning a local repository is possible)\n1. 
Enter `cd temp` and `git log`.\n1. Enter `git remote`.\n1. Enter `git remote get-url origin`. You would be able to see the remote repository location.\n\n1. Try `git config --list`\n1. Try `git remote add upstream https://github.com/wesm/pydata-book`\n1. Now try `git remote` and/or `git remote get-url upstream`.<br>`git pull upstream` would update this local repository.\n\n", "_____no_output_____" ], [ "## Creating a `github` account\n\n", "_____no_output_____" ], [ "[![github](https://avatars1.githubusercontent.com/u/9919?s=200&v=4)](https://www.github.com)", "_____no_output_____" ], [ "* [`github`](https://www.github.com) is one of `git` remote [repository hosting services](https://en.wikipedia.org/wiki/Comparison_of_source_code_hosting_facilities#Version_control_systems).\n* [`dev.naver.com`](https://developers.naver.com) used to provide such service until recent years.\n* `github` also has an [education](https://education.github.com) service.\n* May require to verify email address.\n\n", "_____no_output_____" ], [ "* A free user account can generate indefinite number of Public repositories.\n* Usually a github repository address has following form:<br>`https://github.com/<github id>/<repository name>(.git)`<br>\nex : [`https://github.com/tensorflow/tensorflow.git`](https://github.com/tensorflow/tensorflow)\n* A user can `fork` a public repository.<br>ex : `https://github.com/<github id>/tensorflow.git`<br>This is equivalent to having a clone with a link.\n* If planning to use only one user account for a specific repository, following command is possible.<br>`git remote add <remote name> https://<github id>@github.com/<github id>/<repository name>(.git)`\n\n* With an academic email address and a school ID card image, an instructor (or a [student](https://education.github.com/pack)) may upgrade to an education account; possible to create private repositories.\n* Depending on the situation, an instructor may create an organization on the github; then a repository 
may have following form :<br>`https://(<github id>@)github.com/<organization id>/<repository name>(.git)`\n\n", "_____no_output_____" ], [ "### Authentication", "_____no_output_____" ], [ "* To avoid unauthorized source code change, a remote repository may require id+password authentication.\n* To improve productivity during frequent pushes, `git` may utilize credential helper.\n* A credential helper stores the authentication information possibly after encryption.\n* Following command shows current (global) credential helper:<br>`git config (--global) credential.helper`\n* However, credential information might be sensitive so please use with caution.\n\n", "_____no_output_____" ], [ "## Creating branches and switching between them", "_____no_output_____" ], [ "* Assume you want to test a *radical* new feature; if successful, it would be great new addition.\n* However, you want the existing code base intact until success is certain.\n* Then you can start a new branch.<br>Only when the new feature is successful, you would merge into the existing code base.", "_____no_output_____" ], [ "[![git branch](https://git-scm.com/book/en/v2/images/advance-master.png)](https://git-scm.com/book/en/v2/Git-Branching-Branches-in-a-Nutshell)", "_____no_output_____" ], [ "1. `git branch (--list)` would list branches of the local repository.<br>`git branch -r` to list remote branches.\n1. `git branch <new branch name>` would start a new branch.\n1. `git checkout <new branch name>` would switch to the new branch.\n1. `git checkout -b <new branch name>` would do both steps above.\n1. From now on, this new branch would accumulate commits.\n1. After a few commits, when `git status` shows no uncommitted changes, try `git checkout <previous branch name>`.<br>Then check the files that you changed after previous step.\n1. And then try `git checkout <new branch name>` again. 
What happened to your changes?", "_____no_output_____" ], [ "## Synchronizing after fork or distribution", "_____no_output_____" ], [ "* Click following to start a video tutorial.", "_____no_output_____" ], [ "[![sync upstream playlist](https://i.ytimg.com/vi/P39pzSQx5rY/hqdefault.jpg)](https://www.youtube.com/watch?v=P39pzSQx5rY&list=PLA6B0Lmr9oJNDafh3ndnmbXv0I9wddv63)", "_____no_output_____" ], [ "* When you click on the `fork` button of a repository, you can duplicate it so that you can make changes.\n* However, the developers may continue to the original (or *upstream*) repository; fix bugs and add more features.\n* At some point of time, you may want to update your duplicate repository.\n* [Github](https://help.github.com/articles/syncing-a-fork/) described the procedure to synchronize a fork repository with the upstream repository.", "_____no_output_____" ], [ "1. If not done yet, clone your remote fork repository to a local repository.\n1. `git remote` will list names of remote repositories. Let's assume `origin` points to your fork repository.\n1. Add the *upstream* repository address as `upstream`. <br>`git remote add upstream <upstream repository address>`\n1. `git fetch upstream` would download updates from the upstream repository. However, this alone would not change your workspace yet.\n1. Try `git log --oneline --graph --all`. This would show you all the histories of local and remote branches.\n1. Choose one of the local branches that you want to update. Let's assume its name is `first_branch`.\n1. Try `git rebase first_branch upstream/first_branch`. This would apply new commits in `upstream/first_branch` after fork to your local branch.<br>Depending on the situation, collsion may occur; then we should manually [resolve](https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging) the conflict.\n1. Now `git push origin first_branch` to apply the new commits to the remote fork repository.\n1. 
Repeat from `git log --oneline --graph -all` for all branches of interest.", "_____no_output_____" ], [ "## Travis-CI and Continuous Integration", "_____no_output_____" ], [ "* In short, if you have an open source software project, [Travis-CI](https://www.travis-ci.org) would be able to build, run test software, and reply reports as specified.\n* Please refer to the [Travis-CI documentation](https://docs.travis-ci.com/) for details.", "_____no_output_____" ], [ "## Exercises", "_____no_output_____" ], [ "### 00 : Your first commit", "_____no_output_____" ], [ "1. Clone your repository to your PC\n1. Configure your name and email address\n1. Make a new text file with a plain text\n<br>What would you want to write to the file?\n1. Add the new file\n1. Commit the changes to your local repository with an appropriate message\n1. Push your changes to your repository", "_____no_output_____" ], [ "### 01 : Sync with upstream", "_____no_output_____" ], [ "1. Clone your repository to your PC\n1. Add the upstream repository url as remote\n1. Fetch from the upstream repository\n<br>Try `git log --oneline --all --graph`\n1. Merge with the upstream branch\n<br>How can we use `git merge` command?\n<br>Did you see a message of \"CONFLICT\"?\n1. Push your changes to your repository", "_____no_output_____" ], [ "### 02* : ipynb", "_____no_output_____" ], [ "* This is an optional assignment\n* Please use a separate file : .txt, .ipynb, or .md", "_____no_output_____" ], [ "1. Propose a possible procedure to version-control .ipynb files\n1. Propose a possible procedure to resolve conflict of an .ipynb file", "_____no_output_____" ], [ "## `git` and `github` on Harvard CS50 Twitch", "_____no_output_____" ], [ "* Following is a video tutorial (2hr) on `git` and `github` by [Harvard CS50](https://www.youtube.com/channel/UCcabW7890RKJzL968QWEykA).", "_____no_output_____" ], [ "[![CS50 on Twitch - EP. 
4 - git and GitHub](https://i.ytimg.com/vi/dAHgwd2U0Jg/hqdefault.jpg)](https://www.youtube.com/watch?v=dAHgwd2U0Jg)", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
4aa59f3665d2f052724a98b778ceb5042bfda0e8
3,820
ipynb
Jupyter Notebook
12Noviembre_1310.ipynb
Alejandrico14/git_poo_2209
5a77664e6a6ae6282cee481ec2f5d82910f3c79f
[ "MIT" ]
null
null
null
12Noviembre_1310.ipynb
Alejandrico14/git_poo_2209
5a77664e6a6ae6282cee481ec2f5d82910f3c79f
[ "MIT" ]
null
null
null
12Noviembre_1310.ipynb
Alejandrico14/git_poo_2209
5a77664e6a6ae6282cee481ec2f5d82910f3c79f
[ "MIT" ]
null
null
null
28.507463
241
0.451309
[ [ [ "<a href=\"https://colab.research.google.com/github/Alejandrico14/git_poo_2209/blob/master/12Noviembre_1310.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "#class Nodo:\n # def __init__( self , dato ):\n # self.dato = dato\n # self.siguiente = None\n\n#ejemplo 1\n\n#a= Nodo(12)\n#print(a.dato)\n#print(a.siguiente)\n\n#Empieza agregar mas elementos\n#Ejemplo 2\n\n#a.siguiente=Nodo(20)\n#print(a.siguiente)\n\n#Ejemplo 3\n\n#a.siguiente.siguiente= Nodo(30)\n\n#Ejemplo 4\n\n#a.siguiente.siguiente.siguiente= Nodo(40)\n\n#Ejemplo 5\n\n#a.siguiente.siguiente.siguiente.siguiente=Nodo(50)\n\n#Eliminando nodo 30, ejemplo 6\n\n#a.siguiente.siguiente=a.siguiente.siguiente.siguiente\n\n#Ejemplo 7 (reemplazar un dato)\n\n#a.siguiente.siguiente.dato= 45\n\n#Ejemplo 8 Insertar un dato entre dos.\n\n#tmp= a.siguiente.siguiente.siguiente # se crea la temporal para no perder la referencia del 50\n#a.siguiente.siguiente.siguiente = Nodo(48) #creando nodo que se va a insertar\n#a.siguiente.siguiente.siguiente.siguiente = tmp\n\n\nclass Nodo:\n def __init__( self , dato , sig=None ):\n self.dato = dato\n self.siguiente = sig\n\n\n# ejemplo 1\na = Nodo( 10 )\n\n# ejemplo 2 RECONSTRUCCION\na = Nodo(10 , Nodo(20) )\n\na = Nodo(10 , Nodo(20, Nodo(30)) )\n\na = Nodo(10 , Nodo(20, Nodo(30,Nodo(40,Nodo(50)))) )\n\n\n\n#Corrido Transversal\ncurr_node=a\nprint(curr_node.dato, \"==>\", end=\"\")\nwhile(curr_node.siguiente != None):\n curr_node = curr_node.siguiente\n print(curr_node.dato,\"==>\",end=\"\")\n #si ponemos el print en linea con el anterior print, nos va a \n # dar una lista en escalera, very interesting\n #print(\"\")\nprint(\"\")\n\n", "10 ==>20 ==>30 ==>40 ==>50 ==>\n" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ] ]
4aa5a061b8097ab9dda0772810cdaaec28d8bc7d
28,181
ipynb
Jupyter Notebook
01_Getting_&_Knowing_Your_Data/Occupation/Exercise_with_Solution.ipynb
liuhui998/pandas_exercises
8124aa87652e8ad64512fb281871b5041178b3bd
[ "BSD-3-Clause" ]
3
2020-06-16T04:22:49.000Z
2020-10-28T01:18:10.000Z
01_Getting_&_Knowing_Your_Data/Occupation/Exercise_with_Solution.ipynb
liuhui998/pandas_exercises
8124aa87652e8ad64512fb281871b5041178b3bd
[ "BSD-3-Clause" ]
null
null
null
01_Getting_&_Knowing_Your_Data/Occupation/Exercise_with_Solution.ipynb
liuhui998/pandas_exercises
8124aa87652e8ad64512fb281871b5041178b3bd
[ "BSD-3-Clause" ]
null
null
null
25.184093
145
0.349739
[ [ [ "\n# 用户职业数据分析\n\n这里有 [视频](https://www.youtube.com/watch?v=W8AB5s-L3Rw&list=PLgJhDSE2ZLxaY_DigHeiIDC1cD09rXgJv&index=4) 可以看看老师怎么做这个练习(不过这个在Youtube上,还是英文的)\n\n", "_____no_output_____" ], [ "这一次我们直接从网上拉取数据,特别感谢:https://github.com/justmarkham 提供数据\n\n### Step 1. 导入必须的Python库", "_____no_output_____" ] ], [ [ "import pandas as pd", "_____no_output_____" ] ], [ [ "### Step 2. 从这里 [下载地址](https://raw.githubusercontent.com/justmarkham/DAT8/master/data/u.user) 导入数据\n", "_____no_output_____" ], [ "### Step 3. 将这文件中的数据放到变量 users, 并且使用 user_id 做为索引", "_____no_output_____" ] ], [ [ "\n# pd.read_csv 可以直接读网上的数据\n# 使用 read_csv 函数,由于用 '|' 来分隔字段,所以增加了 sep='|' 的参数\n# 同时使用 index_col 来指定了索引字段\n# read_csv 返回的是一个 DataFrame 对象,约等于 Excel 里的一个sheet\n# \nusers = pd.read_csv('https://raw.githubusercontent.com/justmarkham/DAT8/master/data/u.user', \n sep='|', index_col='user_id')", "_____no_output_____" ] ], [ [ "### Step 4. 看 users 前25行数据", "_____no_output_____" ] ], [ [ "users.head(25)", "_____no_output_____" ] ], [ [ "### 看 users 最后10行数据", "_____no_output_____" ] ], [ [ "users.tail(10)", "_____no_output_____" ] ], [ [ "### Step 6. users 里有多少行数据?", "_____no_output_____" ] ], [ [ "users.shape[0]", "_____no_output_____" ] ], [ [ "### Step 7. users 里有多少列数据?", "_____no_output_____" ] ], [ [ "users.shape[1]", "_____no_output_____" ] ], [ [ "### Step 8. 输出 users 的所有列名", "_____no_output_____" ] ], [ [ "users.columns", "_____no_output_____" ] ], [ [ "### Step 9. users 的索引是什么?", "_____no_output_____" ] ], [ [ "# index 是表示用什么标识 DataFrame 里的的某一行\n# 类似 列表(list) 的里的下标,字典(dict)里的 key\n#\n# 在 DataFrame 里一般是用 0~n 的数字做索引\n# 也可以用日期/做索引, 类销量表里的日期值 \n# 类似于标签 (Label)\nusers.index", "_____no_output_____" ] ], [ [ "### Step 10. 每一列的数据类型是什么?", "_____no_output_____" ] ], [ [ "users.dtypes", "_____no_output_____" ] ], [ [ "### Step 11. 只打出职业(occupation)列的数据", "_____no_output_____" ] ], [ [ "# 方法1\nusers.occupation\n\n#or\n# 方法2\nusers['occupation']", "_____no_output_____" ] ], [ [ "### Step 12. 
users 里有多少种不同的职业?", "_____no_output_____" ] ], [ [ "# 使用nunique() 来统计去重的职业(\"occupation\") 数量\nusers.occupation.nunique()\n\n# 下面的是另一种解决,在前面练习讲解过\n# 使用 value_counts() 统计每一种不同职业(\"occupation\") 数量,再调用 count() 来统计\n#users.occupation.value_counts().count()", "_____no_output_____" ] ], [ [ "### Step 13. users中最常出现的职业是什么?", "_____no_output_____" ] ], [ [ "# 统计每一种不同职业(\"occupation\") 数量,再用 head 来取第一行,再从索引(index)中取到职业信息\nusers.occupation.value_counts().head(1).index[0]\n\n#或是 用value_counts() 后加 head() 来显示前5名的职业\n# users.occupation.value_counts().head()", "_____no_output_____" ] ], [ [ "### Step 14. 统计 users 的数据情况.", "_____no_output_____" ] ], [ [ "#注意,一般只会对数字类型的字段进行统计\nusers.describe() \n# 大家可以看到 总数(count),平均值(mean), 标准差(std) 等重要数据信息,后面我们讲机器预期时可以用到", "_____no_output_____" ] ], [ [ "### Step 15. 统计 users 所有字段的数据情况", "_____no_output_____" ] ], [ [ "#注意,一般只会对数字类型的字段进行统计\nusers.describe(include = \"all\") \n# 举个例子:\n# 大家可看到职业(\"occupation\") 提示有\n# unique 不重复职业 21个,\n# top 出现最多的职业是 student,\n# freq student 有 196个\n# 是不是很好玩", "_____no_output_____" ] ], [ [ "### Step 16. 再统计一下职业(occupation)列", "_____no_output_____" ] ], [ [ "# 上面解释过,这里不重复\nusers.occupation.describe()", "_____no_output_____" ] ], [ [ "### Step 17. users 中的平均用户年龄?", "_____no_output_____" ] ], [ [ "# 对 users 的 age 列取平均,现用 round 取整\nround(users.age.mean())", "_____no_output_____" ] ], [ [ "### Step 18. users 中最出现最少的年龄是哪些?", "_____no_output_____" ] ], [ [ "# 先对年龄(\"age\")列中不重复的年龄出现的次数进行统计,再用 tail() 看最后几行\nusers.age.value_counts().tail() #7, 10, 11, 66 and 73 years -> only 1 occurrence", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa5a2d49315553324e6ff20c0408209dd2010fb
27,996
ipynb
Jupyter Notebook
rl_roboschool_ray/.ipynb_checkpoints/rl_roboschool_ray_distributed-checkpoint.ipynb
daekeun-ml/sagemaker-rl-kr
0b9658ecb21864644d6e612eb8ced5a9b1268eb8
[ "MIT-0", "MIT" ]
1
2021-01-18T10:41:37.000Z
2021-01-18T10:41:37.000Z
rl_roboschool_ray/rl_roboschool_ray_distributed.ipynb
daekeun-ml/sagemaker-rl-kr
0b9658ecb21864644d6e612eb8ced5a9b1268eb8
[ "MIT-0", "MIT" ]
null
null
null
rl_roboschool_ray/rl_roboschool_ray_distributed.ipynb
daekeun-ml/sagemaker-rl-kr
0b9658ecb21864644d6e612eb8ced5a9b1268eb8
[ "MIT-0", "MIT" ]
5
2020-04-17T13:05:35.000Z
2022-02-20T11:23:00.000Z
34.520345
556
0.555615
[ [ [ "# Amazon SageMaker로 다중 노드들 간 분산 RL을 이용해 Roboschool 에이전트 훈련\n---\n이 노트북은 `rl_roboschool_ray.ipynb` 의 확장으로, Ray와 TensorFlow를 사용한 강화 학습의 수평(horizontal) 스케일링을 보여줍니다.", "_____no_output_____" ], [ "## 해결해야 할 Roboschool 문제 선택\n\nRoboschool은 가상 로봇 시스템에 대한 RL 정책을 훈련시키는 데 주로 사용되는 [오픈 소스](https://github.com/openai/roboschool/tree/master/roboschool) 물리 시뮬레이터입니다. Roboschool은 다양한 로봇 문제에 해당하는 [다양한](https://github.com/openai/roboschool/blob/master/roboschool/__init__.py) gym 환경을 정의합니다. 아래는 다양한 난이도 중 몇 가지를 보여줍니다.\n\n- **Reacher (쉬움)** - 2개의 조인트만 있는 매우 간단한 로봇이 목표물에 도달합니다.\n- **호퍼 (중간)** - 한쪽 다리와 발이 달린 간단한 로봇이 트랙을 뛰어 내리는 법을 배웁니다.\n- **휴머노이드 (어려움)** - 두 개의 팔, 두 개의 다리 등이 있는 복잡한 3D 로봇은 넘어지지 않고 균형을 잡은 다음 트랙에서 달리는 법을 배웁니다.\n\n간단한 문제들은 적은 계산 리소스 상에서 더 빨리 훈련됩니다. 물론 더 복잡한 문제들은 훈련이 느리지만 더 재미있습니다.\n", "_____no_output_____" ] ], [ [ "# Uncomment the problem to work on\n#roboschool_problem = 'reacher'\n#roboschool_problem = 'hopper'\nroboschool_problem = 'humanoid'", "_____no_output_____" ] ], [ [ "## 전제 조건(Pre-requisites)\n\n### 라이브러리 임포트\n\n시작하기 위해, 필요한 Python 라이브러리를 가져와서 권한 및 구성을 위한 몇 가지 전제 조건으로 환경을 설정합니다.", "_____no_output_____" ] ], [ [ "import sagemaker\nimport boto3\nimport sys\nimport os\nimport glob\nimport re\nimport subprocess\nfrom IPython.display import HTML, Markdown\nimport time\nfrom time import gmtime, strftime\nsys.path.append(\"common\")\nfrom misc import get_execution_role, wait_for_s3_object\nfrom docker_utils import build_and_push_docker_image\nfrom sagemaker.rl import RLEstimator, RLToolkit, RLFramework\nfrom markdown_helper import generate_help_for_s3_endpoint_permissions, create_s3_endpoint_manually", "_____no_output_____" ] ], [ [ "### S3 버킷 설정\n\n체크포인트(checkpoint) 및 메타데이터에 사용하려는 S3 버킷에 대한 연결 및 인증을 설정합니다.", "_____no_output_____" ] ], [ [ "sage_session = sagemaker.session.Session()\ns3_bucket = sage_session.default_bucket() \ns3_output_path = 's3://{}/'.format(s3_bucket)\nprint(\"S3 bucket path: {}\".format(s3_output_path))", "_____no_output_____" ] ], [ [ "### 변수 
설정\n\n훈련 작업의 작업 접두사(job prefix)와 *컨테이너의 이미지 경로(BYOC 인 경우에만)와 같은 변수*를 정의합니다.", "_____no_output_____" ] ], [ [ "# create a descriptive job name \njob_name_prefix = 'rl-roboschool-distributed-' + roboschool_problem\naws_region = boto3.Session().region_name", "_____no_output_____" ] ], [ [ "### 훈련이 진행되는 위치 구성\n\nSageMaker 노트북 인스턴스 또는 로컬 노트북 인스턴스를 사용하여 RL 훈련 작업을 훈련할 수 있습니다. 로컬 모드는 SageMaker Python SDK를 사용하여 SageMaker에 배포하기 전에 로컬 컨테이너에서 코드를 실행합니다. 이렇게 하면 , 익숙한 Python SDK 인터페이스를 사용하면서 반복 테스트 및 디버깅 속도를 높일 수 있습니다. 여러분은 `local_mode = True` 만 설정하면 됩니다.", "_____no_output_____" ] ], [ [ "# run in local_mode on this machine, or as a SageMaker TrainingJob?\nlocal_mode = False\n\nif local_mode:\n instance_type = 'local'\nelse:\n # If on SageMaker, pick the instance type\n instance_type = \"ml.c5.2xlarge\"\n \ntrain_instance_count = 3", "_____no_output_____" ] ], [ [ "### IAM 역할 생성\n\nSageMaker 노트북 `role = sagemaker.get_execution_role()`을 실행할 때 실행 역할(execution role)을 얻거나 로컬 시스템에서 실행할 때 utils 메소드 `role = get_execution_role()`을 사용하여 실행 역할을 작성하세요.", "_____no_output_____" ] ], [ [ "try:\n role = sagemaker.get_execution_role()\nexcept:\n role = get_execution_role()\n\nprint(\"Using IAM role arn: {}\".format(role))", "_____no_output_____" ] ], [ [ "### `로컬` 모드용 도커 설치\n\n로컬 모드에서 작업하려면 도커(docker)가 설치되어 있어야 합니다. 로컬 머신에서 실행할 때는 docker 또는 docker-compose(로컬 CPU 머신의 경우) 및 nvidia-docker(로컬 GPU 머신의 경우)가 설치되어 있는지 확인하세요. 또는, SageMaker 노트북 인스턴스에서 실행할 때 다음 스크립트를 실행하여 관련 패키지들을 설치할 수 있습니다.\n\n참고로, 한 번에 하나의 로컬 노트북만 실행할 수 있습니다.", "_____no_output_____" ] ], [ [ "# only run from SageMaker notebook instance\nif local_mode:\n !/bin/bash ./common/setup.sh", "_____no_output_____" ] ], [ [ "## 도커 컨테이너 빌드\n\nRoboschool이 설치된 사용자 정의 도커 컨테이너를 빌드해야 합니다. 컨테이너 빌드 작업은 아래 과정을 거쳐 처리됩니다.\n\n1. 기본 컨테이너 이미지 가져오기\n2. Roboschool 및 의존성 패키지 설치\n3. 새 컨테이너 이미지를 ECR에 업로드\n\n인터넷 연결이 느린 컴퓨터에서 실행 중인 경우, 이 단계에서 시간이 오래 걸릴 수 있습니다. 
노트북 인스턴스가 SageMaker 또는 EC2 인 경우 인스턴스 유형에 따라 3-10 분이 걸립니다.", "_____no_output_____" ] ], [ [ "%%time\n\ncpu_or_gpu = 'gpu' if instance_type.startswith('ml.p') else 'cpu'\nrepository_short_name = \"sagemaker-roboschool-ray-%s\" % cpu_or_gpu\ndocker_build_args = {\n 'CPU_OR_GPU': cpu_or_gpu, \n 'AWS_REGION': boto3.Session().region_name,\n}\ncustom_image_name = build_and_push_docker_image(repository_short_name, build_args=docker_build_args)\nprint(\"Using ECR image %s\" % custom_image_name)", "_____no_output_____" ] ], [ [ "## 훈련 코드 작성\n\n훈련 코드는 `/src` 디렉토리에 업로드된 `“train-{roboschool_problem}.py”` 파일에 작성됩니다. 먼저 환경 파일과 사전 설정 파일을 가져온 다음, `main()` 함수를 정의하세요.", "_____no_output_____" ] ], [ [ "!pygmentize src/train-{roboschool_problem}.py", "_____no_output_____" ] ], [ [ "## Ray 동종 스케일링 - train_instance_count > 1 지정", "_____no_output_____" ], [ "동종(Homogeneous) 스케일링을 통해 동일한 유형의 여러 인스턴스를 사용할 수 있습니다.", "_____no_output_____" ] ], [ [ "metric_definitions = RLEstimator.default_metric_definitions(RLToolkit.RAY)\n \nestimator = RLEstimator(entry_point=\"train-%s.py\" % roboschool_problem,\n source_dir='src',\n dependencies=[\"common/sagemaker_rl\"],\n image_name=custom_image_name,\n role=role,\n train_instance_type=instance_type,\n train_instance_count=train_instance_count,\n output_path=s3_output_path,\n base_job_name=job_name_prefix,\n metric_definitions=metric_definitions,\n hyperparameters={\n # Attention scientists! You can override any Ray algorithm parameter here:\n \n # 3 m4.2xl with 8 cores each. We have to leave 1 core for ray scheduler.\n # Don't forget to change this on the basis of instance type.\n \"rl.training.config.num_workers\": (8 * train_instance_count) - 1\n \n #\"rl.training.config.horizon\": 5000,\n #\"rl.training.config.num_sgd_iter\": 10,\n }\n )\n\nestimator.fit(wait=local_mode)\njob_name = estimator.latest_training_job.job_name\nprint(\"Training job: %s\" % job_name)", "_____no_output_____" ] ], [ [ "## 시각화\n\nRL 훈련에는 시간이 오래 걸릴 수 있습니다. 
따라서 훈련 작업이 동작하는 동안 훈련 작업의 진행 상황을 추적할 수 있는 다양한 방법들이 있습니다. 훈련 도중 일부 중간 출력이 S3에 저장되므로, 이를 캡처하도록 설정합니다.", "_____no_output_____" ] ], [ [ "print(\"Job name: {}\".format(job_name))\n\ns3_url = \"s3://{}/{}\".format(s3_bucket,job_name)\n\nif local_mode:\n output_tar_key = \"{}/output.tar.gz\".format(job_name)\nelse:\n output_tar_key = \"{}/output/output.tar.gz\".format(job_name)\n\nintermediate_folder_key = \"{}/output/intermediate/\".format(job_name)\noutput_url = \"s3://{}/{}\".format(s3_bucket, output_tar_key)\nintermediate_url = \"s3://{}/{}\".format(s3_bucket, intermediate_folder_key)\n\nprint(\"S3 job path: {}\".format(s3_url))\nprint(\"Output.tar.gz location: {}\".format(output_url))\nprint(\"Intermediate folder path: {}\".format(intermediate_url))\n \ntmp_dir = \"/tmp/{}\".format(job_name)\nos.system(\"mkdir {}\".format(tmp_dir))\nprint(\"Create local folder {}\".format(tmp_dir))", "_____no_output_____" ] ], [ [ "### 훈련 롤아웃 비디오 가져오기\n\n특정 롤아웃의 비디오는 훈련 중 S3에 기록됩니다. 여기에서는 S3에서 마지막 10개의 비디오 클립을 가져 와서 마지막 비디오를 렌더링합니다.", "_____no_output_____" ] ], [ [ "recent_videos = wait_for_s3_object(s3_bucket, intermediate_folder_key, tmp_dir, \n fetch_only=(lambda obj: obj.key.endswith(\".mp4\") and obj.size>0), limit=10)", "_____no_output_____" ], [ "last_video = sorted(recent_videos)[-1] # Pick which video to watch\nos.system(\"mkdir -p ./src/tmp_render_homogeneous/ && cp {} ./src/tmp_render_homogeneous/last_video.mp4\".format(last_video))\nHTML('<video src=\"./src/tmp_render_homogeneous/last_video.mp4\" controls autoplay></video>')", "_____no_output_____" ] ], [ [ "### 훈련 작업에 대한 지표 plot\n\nCloudWatch 지표에 기록된 알고리즘 지표를 사용하여 실행 중인 훈련의 보상 지표를 볼 수 있습니다. 
시간이 지남에 따라, 모델의 성능을 볼 수 있도록 이를 plot할 수 있습니다.", "_____no_output_____" ] ], [ [ "%matplotlib inline\nfrom sagemaker.analytics import TrainingJobAnalytics\n\ndf = TrainingJobAnalytics(job_name, ['episode_reward_mean']).dataframe()\nnum_metrics = len(df)\nif num_metrics == 0:\n print(\"No algorithm metrics found in CloudWatch\")\nelse:\n plt = df.plot(x='timestamp', y='value', figsize=(12,5), legend=True, style='b-')\n plt.set_ylabel('Mean reward per episode')\n plt.set_xlabel('Training time (s)')", "_____no_output_____" ] ], [ [ "### 훈련 진행 상황 모니터링\n\n위의 시각화 셀을 반복해서 실행하여 최신 비디오를 얻거나, 훈련 작업이 진행됨에 따라 최신 지표를 볼 수 있습니다.", "_____no_output_____" ], [ "## Ray 이기종(heterogeneous) 스케일링", "_____no_output_____" ], [ "RL 훈련을 확장하기 위해 롤아웃 작업자 수를 늘릴 수 있습니다. 그러나, 롤아웃이 많을수록 훈련 중 종종 병목 현상이 발생할 수 있습니다. 이를 방지하기 위해 하나 이상의 GPU가 있는 인스턴스를 훈련용으로 사용하고 여러 개의 CPU 인스턴스들을 롤아웃에 사용할 수 있습니다.", "_____no_output_____" ], [ "SageMaker는 훈련 작업에서 단일 유형의 인스턴스를 지원하므로, 두 개의 SageMaker 작업을 서로 통신하도록 함으로써 위의 목표를 달성할 수 있습니다. 이름 지정을 위해 `기본 클러스터(Primary cluster)`를 사용하여 하나 이상의 GPU 인스턴스를 참조하고 `보조 클러스터(Secondary cluster)`를 사용하여 CPU 인스턴스 클러스터를 참조합니다.", "_____no_output_____" ], [ "> local_mode는 이 유형의 스케일링을 테스트하는 데 사용할 수 없습니다.", "_____no_output_____" ], [ "SageMaker 작업을 구성하기 전에 먼저 VPC 모드에서 SageMaker를 실행해야 합니다. VPC 모드에서는 두 SageMaker 작업이 네트워크를 통해 통신할 수 있습니다.", "_____no_output_____" ], [ "작업 시작 스크립트에 서브넷(subnet)과 보안 그룹(security group)을 제공하면 됩니다. 
이 예에서는 기본 VPC 구성을 사용합니다.", "_____no_output_____" ] ], [ [ "ec2 = boto3.client('ec2')\ndefault_vpc = [vpc['VpcId'] for vpc in ec2.describe_vpcs()['Vpcs'] if vpc[\"IsDefault\"] == True][0]\n\ndefault_security_groups = [group[\"GroupId\"] for group in ec2.describe_security_groups()['SecurityGroups'] \\\n if group[\"GroupName\"] == \"default\" and group[\"VpcId\"] == default_vpc]\n\ndefault_subnets = [subnet[\"SubnetId\"] for subnet in ec2.describe_subnets()[\"Subnets\"] \\\n if subnet[\"VpcId\"] == default_vpc and subnet['DefaultForAz']==True]\n\nprint(\"Using default VPC:\", default_vpc)\nprint(\"Using default security group:\", default_security_groups)\nprint(\"Using default subnets:\", default_subnets)", "_____no_output_____" ] ], [ [ "VPC 모드에서 실행 중인 SageMaker 작업은 S3 리소스에 액세스할 수 없습니다. 따라서, SageMaker 컨테이너에서 S3에 액세스할 수 있도록 VPC S3 엔드포인트를 생성해야 합니다. VPC 모드에 대한 자세한 내용을 보려면 [이 링크](https://docs.aws.amazon.com/sagemaker/latest/dg/train-vpc.html)를 방문하세요.", "_____no_output_____" ] ], [ [ "try:\n route_tables = [route_table[\"RouteTableId\"] for route_table in ec2.describe_route_tables()['RouteTables']\\\n if route_table['VpcId'] == default_vpc]\nexcept Exception as e:\n if \"UnauthorizedOperation\" in str(e):\n display(Markdown(generate_help_for_s3_endpoint_permissions(role)))\n else:\n display(Markdown(create_s3_endpoint_manually(aws_region, default_vpc)))\n raise e\n\nprint(\"Trying to attach S3 endpoints to the following route tables:\", route_tables)\n\nassert len(route_tables) >= 1, \"No route tables were found. 
Please follow the VPC S3 endpoint creation \"\\\n \"guide by clicking the above link.\"\n\ntry:\n ec2.create_vpc_endpoint(DryRun=False,\n VpcEndpointType=\"Gateway\",\n VpcId=default_vpc,\n ServiceName=\"com.amazonaws.{}.s3\".format(aws_region),\n RouteTableIds=route_tables)\n print(\"S3 endpoint created successfully!\")\nexcept Exception as e:\n if \"RouteAlreadyExists\" in str(e):\n print(\"S3 endpoint already exists.\")\n elif \"UnauthorizedOperation\" in str(e):\n display(Markdown(generate_help_for_s3_endpoint_permissions(role)))\n raise e\n else:\n display(Markdown(create_s3_endpoint_manually(aws_region, default_vpc)))\n raise e", "_____no_output_____" ] ], [ [ "### 인스턴스 유형 구성\n\n1 Volta (V100) GPU와 40개의 CPU 코어로 클러스터를 구성해 보겠습니다. ml.p3.2xlarge에는 8개의 CPU 코어가 있고 ml.c5.4xlarge에는 16개의 CPU 코어가 있으므로 1개의 ml.p3.2xlarge 인스턴스와 2개의 ml.c5.4xlarge 인스턴스를 사용하여 이 작업을 수행할 수 있습니다.", "_____no_output_____" ] ], [ [ "%%time\n\n# Build CPU image\ncpu_repository_short_name = \"sagemaker-roboschool-ray-%s\" % \"cpu\"\ndocker_build_args = {\n 'CPU_OR_GPU': \"cpu\", \n 'AWS_REGION': boto3.Session().region_name,\n}\ncpu_image_name = build_and_push_docker_image(repository_short_name, build_args=docker_build_args)\nprint(\"Using CPU ECR image %s\" % cpu_image_name)\n\n# Build GPU image\ngpu_repository_short_name = \"sagemaker-roboschool-ray-%s\" % \"gpu\"\ndocker_build_args = {\n 'CPU_OR_GPU': \"gpu\", \n 'AWS_REGION': boto3.Session().region_name,\n}\ngpu_image_name = build_and_push_docker_image(repository_short_name, build_args=docker_build_args)\nprint(\"Using GPU ECR image %s\" % gpu_image_name)", "_____no_output_____" ], [ "primary_cluster_instance_type = \"ml.p3.2xlarge\"\nprimary_cluster_instance_count = 1\n\nsecondary_cluster_instance_type = \"ml.c5.4xlarge\"\nsecondary_cluster_instance_count = 2\n\ntotal_cpus = 40 - 1 # Leave one for ray scheduler\ntotal_gpus = 1", "_____no_output_____" ], [ "primary_cluster_instance_type = \"ml.p3.16xlarge\"\nprimary_cluster_instance_count = 
1\n\nsecondary_cluster_instance_type = \"ml.c5.4xlarge\"\nsecondary_cluster_instance_count = 2\n\ntotal_cpus = 40 - 1 # Leave one for ray scheduler\ntotal_gpus = 8", "_____no_output_____" ] ], [ [ "다음으로, 훈련하려는 roboschool 에이전트를 선택합니다. 이기종(heterogeneous) 훈련의 경우 인스턴스 간 동기화를 지원하는 몇 가지 추가 파라메터들을 훈련 작업에 전달합니다.\n\n- s3_bucket, s3_prefix: 마스터 IP 주소와 같은 메타데이터 저장에 사용\n- rl_cluster_type: \"기본\" 또는 \"보조\"\n- aws_region: VPC 모드에서 S3에 연결하는 데 필요\n- rl_num_instances_secondary: 보조 클러스터의 노드 수\n- subnets, security_group_ids: VPC 모드에 필요", "_____no_output_____" ] ], [ [ "roboschool_problem = 'reacher'\njob_name_prefix = 'rl-roboschool-distributed-'+ roboschool_problem\n\ns3_output_path = 's3://{}/'.format(s3_bucket) # SDK appends the job name and output folder\n\n# We explicitly need to specify these params so that the two jobs can synchronize using the metadata stored here\ns3_bucket = sage_session.default_bucket()\ns3_prefix = \"dist-ray-%s-1GPU-40CPUs\" % (roboschool_problem)\n\n# Make sure that the prefix is empty\n!aws s3 rm --recursive s3://{s3_bucket}/{s3_prefix} ", "_____no_output_____" ] ], [ [ "### 기본 클러스터 시작 (1 GPU 훈련 인스턴스)", "_____no_output_____" ] ], [ [ "primary_cluster_estimator = RLEstimator(entry_point=\"train-%s.py\" % roboschool_problem,\n source_dir='src',\n dependencies=[\"common/sagemaker_rl\"],\n image_name=gpu_image_name,\n role=role,\n train_instance_type=primary_cluster_instance_type,\n train_instance_count=primary_cluster_instance_count,\n output_path=s3_output_path,\n base_job_name=job_name_prefix,\n metric_definitions=metric_definitions,\n train_max_run=int(3600 * .5), # Maximum runtime in seconds\n hyperparameters={\n \"s3_prefix\": s3_prefix, # Important for syncing\n \"s3_bucket\": s3_bucket, # Important for syncing\n \"aws_region\": boto3.Session().region_name, # Important for S3 connection\n \"rl_cluster_type\": \"primary\", # Important for syncing\n \"rl_num_instances_secondary\": secondary_cluster_instance_count, # Important for syncing\n 
\"rl.training.config.num_workers\": total_cpus,\n \"rl.training.config.train_batch_size\": 20000,\n \"rl.training.config.num_gpus\": total_gpus,\n },\n subnets=default_subnets, # Required for VPC mode\n security_group_ids=default_security_groups # Required for VPC mode\n )\n\nprimary_cluster_estimator.fit(wait=False)\nprimary_job_name = primary_cluster_estimator.latest_training_job.job_name\nprint(\"Primary Training job: %s\" % primary_job_name)", "_____no_output_____" ] ], [ [ "### 보조 클러스터 시작 (2 CPU 인스턴스)", "_____no_output_____" ] ], [ [ "secondary_cluster_estimator = RLEstimator(entry_point=\"train-%s.py\" % roboschool_problem,\n source_dir='src',\n dependencies=[\"common/sagemaker_rl\"],\n image_name=cpu_image_name,\n role=role,\n train_instance_type=secondary_cluster_instance_type,\n train_instance_count=secondary_cluster_instance_count,\n output_path=s3_output_path,\n base_job_name=job_name_prefix,\n metric_definitions=metric_definitions,\n train_max_run=3600, # Maximum runtime in seconds\n hyperparameters={\n \"s3_prefix\": s3_prefix, # Important for syncing\n \"s3_bucket\": s3_bucket, # Important for syncing\n \"aws_region\": boto3.Session().region_name, # Important for S3 connection\n \"rl_cluster_type\": \"secondary\", # Important for syncing\n },\n subnets=default_subnets, # Required for VPC mode\n security_group_ids=default_security_groups # Required for VPC mode\n )\n\nsecondary_cluster_estimator.fit(wait=False)\nsecondary_job_name = secondary_cluster_estimator.latest_training_job.job_name\nprint(\"Secondary Training job: %s\" % secondary_job_name)", "_____no_output_____" ] ], [ [ "### 시각화", "_____no_output_____" ] ], [ [ "print(\"Job name: {}\".format(primary_job_name))\n\ns3_url = \"s3://{}/{}\".format(s3_bucket,primary_job_name)\n\nif local_mode:\n output_tar_key = \"{}/output.tar.gz\".format(primary_job_name)\nelse:\n output_tar_key = \"{}/output/output.tar.gz\".format(primary_job_name)\n\nintermediate_folder_key = 
\"{}/output/intermediate/\".format(primary_job_name)\noutput_url = \"s3://{}/{}\".format(s3_bucket, output_tar_key)\nintermediate_url = \"s3://{}/{}\".format(s3_bucket, intermediate_folder_key)\n\nprint(\"S3 job path: {}\".format(s3_url))\nprint(\"Output.tar.gz location: {}\".format(output_url))\nprint(\"Intermediate folder path: {}\".format(intermediate_url))\n \ntmp_dir = \"/tmp/{}\".format(primary_job_name)\nos.system(\"mkdir {}\".format(tmp_dir))\nprint(\"Create local folder {}\".format(tmp_dir))", "_____no_output_____" ] ], [ [ "### 훈련 롤아웃 비디오 가져오기\n\n특정 롤아웃의 비디오는 훈련 중 S3에 기록됩니다. 여기에서는 S3에서 마지막 10개의 비디오 클립을 가져 와서 마지막 비디오를 렌더링합니다.", "_____no_output_____" ] ], [ [ "recent_videos = wait_for_s3_object(s3_bucket, intermediate_folder_key, tmp_dir, \n fetch_only=(lambda obj: obj.key.endswith(\".mp4\") and obj.size>0), limit=10)", "_____no_output_____" ], [ "last_video = sorted(recent_videos)[-1] # Pick which video to watch\nos.system(\"mkdir -p ./src/tmp_render_heterogeneous/ && cp {} ./src/tmp_render_heterogeneous/last_video.mp4\".format(last_video))\nHTML('<video src=\"./src/tmp_render_heterogeneous/last_video.mp4\" controls autoplay></video>')", "_____no_output_____" ] ], [ [ "### 훈련 작업에 대한 지표 plot\n\nCloudWatch 지표에 기록된 알고리즘 지표를 사용하여 실행 중인 훈련의 보상 지표를 볼 수 있습니다. 시간이 지남에 따라, 모델의 성능을 볼 수 있도록 이를 plot할 수 있습니다.", "_____no_output_____" ] ], [ [ "%matplotlib inline\nfrom sagemaker.analytics import TrainingJobAnalytics\n\ndf = TrainingJobAnalytics(primary_job_name, ['episode_reward_mean']).dataframe()\nnum_metrics = len(df)\nif num_metrics == 0:\n print(\"No algorithm metrics found in CloudWatch\")\nelse:\n plt = df.plot(x='timestamp', y='value', figsize=(12,5), legend=True, style='b-')\n plt.set_ylabel('Mean reward per episode')\n plt.set_xlabel('Training time (s)')", "_____no_output_____" ] ], [ [ "위의 시각화 셀을 반복해서 실행하여 최신 비디오를 얻거나, 훈련 작업이 진행됨에 따라 최신 지표를 볼 수 있습니다.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
4aa5c630385f4db368c500d863f05ac1875dc0f1
100,764
ipynb
Jupyter Notebook
experiments/Harmonics_experiment.ipynb
alercebroker/turbo-fats
0e8f0452ce1bcac69ee876c5598a0e1152374400
[ "MIT" ]
null
null
null
experiments/Harmonics_experiment.ipynb
alercebroker/turbo-fats
0e8f0452ce1bcac69ee876c5598a0e1152374400
[ "MIT" ]
2
2021-04-21T14:51:36.000Z
2021-04-21T14:51:44.000Z
experiments/Harmonics_experiment.ipynb
alercebroker/turbo-fats
0e8f0452ce1bcac69ee876c5598a0e1152374400
[ "MIT" ]
1
2020-10-14T21:47:31.000Z
2020-10-14T21:47:31.000Z
231.641379
34,328
0.899984
[ [ [ "import numpy as np\nimport pandas as pd\n%matplotlib inline\nimport matplotlib.pyplot as plt\nimport turbofats", "_____no_output_____" ] ], [ [ "## Create a lightcurve", "_____no_output_____" ] ], [ [ "n_samples = 400\nn_days = 100\nn_components = 7\nperiod = 7.4\nstd = 0.5\ntime = np.random.rand(n_samples) * n_days\ntime.sort()\ntime = time.reshape(-1, 1)\ncosine_components = np.random.randn(1, n_components) * np.exp(-np.linspace(0, 4, n_components))\nsine_components = np.random.randn(1, n_components) * np.exp(-np.linspace(0, 4, n_components))\nbias = np.random.randn(1) * 5\n\ntime_arg = 2*np.pi*time/(period/np.arange(1, n_components+1).reshape(1, n_components))\nmagnitude = np.sum(cosine_components * np.cos(time_arg) + sine_components * np.sin(time_arg), axis=1) + bias\n\nerror = np.ones(n_samples)*std + np.random.rand(n_samples)*std*3\n\nmagnitude += error", "_____no_output_____" ], [ "print(time.shape, magnitude.shape)\nplt.subplot(2, 1, 1)\nplt.errorbar(time, magnitude, yerr=error, fmt='*')\nplt.subplot(2, 1, 2)\nplt.errorbar(time % period, magnitude, yerr=error, fmt='*')", "(400, 1) (400,)\n" ], [ "feature_space = turbofats.NewFeatureSpace(feature_list=['PeriodLS_v2', 'Period_fit_v2', 'Harmonics'])", "_____no_output_____" ], [ "detections_data = np.stack(\n [\n time.flatten(),\n magnitude.flatten(),\n error\n ],\n axis=-1\n)\ndetections = pd.DataFrame(\n data=detections_data,\n columns=['mjd', 'magpsf_corr', 'sigmapsf_corr'],\n index=['asdf'] * len(detections_data)\n)\nfeature_values = feature_space.calculate_features(detections)", "2.76163\n" ], [ "print(cosine_components)\nprint(sine_components)\nprint(np.sqrt(cosine_components**2 + sine_components**2))\nfeature_values", "[[ 1.53332206e+00 -3.46816936e-01 -2.86905319e-01 5.67639226e-02\n -3.77999219e-02 -1.01550653e-03 3.86706726e-02]]\n[[-0.05578493 0.23234559 -0.01994014 0.04322573 0.02133497 -0.00550677\n 0.02116701]]\n[[1.5343365 0.41745235 0.28759741 0.07134849 0.04340524 0.00559962\n 
0.04408473]]\n" ], [ "reconstructed_period = feature_values['PeriodLS_v2'].values[0]\nreconstructed_time_arg = 2*np.pi*time/(reconstructed_period/np.arange(1, n_components+1).reshape(1, n_components))\nprint(reconstructed_time_arg.shape)\nreconstructed_harmonics_mag = np.array([feature_values['Harmonics_mag_%d' % i].values[0] for i in range(1, 8)])\nreconstructed_harmonics_phase = np.array([0.0] + [feature_values['Harmonics_phase_%d' % i].values[0] for i in range(2, 8)])\n\nreconstructed_mag = reconstructed_harmonics_mag.reshape(1, -1)*np.cos(reconstructed_time_arg - reconstructed_harmonics_phase.reshape(1, -1))\nreconstructed_mag = np.real(np.sum(reconstructed_mag, axis=1) + np.mean(magnitude))", "(400, 7)\n" ], [ "plt.subplot(2, 1, 1)\nplt.scatter(time % period, magnitude)\nplt.scatter((time - 3.0) % period, reconstructed_mag)\nplt.title(f'periodo original {period} dias')\nplt.subplot(2, 1, 2)\nplt.scatter(time % reconstructed_period, magnitude)\nplt.scatter((time-0.5) % reconstructed_period, reconstructed_mag)\nplt.title(f'periodo reconstruido {reconstructed_period} dias')\n\nplt.tight_layout()", "_____no_output_____" ], [ "tt = np.linspace(0, 10, 1000)\na = 1.2\nb = -1.5\nf = 0.2\ny = a*np.cos(2*np.pi*f*tt) + b*np.sin(2*np.pi*f*tt)\nplt.plot(tt, y)\nm = np.sqrt(a**2 + b**2)\nphi = np.arctan2(b, a)\ny2 = m*np.cos(2*np.pi*f*tt-phi)\nplt.plot(tt, y2)", "_____no_output_____" ], [ "lc = pd.read_pickle('~/alerce/GP-Augmentation/results_paula/augmented_lightcurves.pkl')\nlc.head()", "_____no_output_____" ], [ "detections = lc[lc.detected]\noids = detections.index.unique()\nfor oid in oids:\n one_lc = detections.loc[oid]\n feature_values = feature_space.calculate_features(one_lc)\n print(oid,)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa5cf52b517e217cbfe8847b03699b20db7c326
12,666
ipynb
Jupyter Notebook
linked_lists/add_reverse/add_reverse_challenge.ipynb
hanbf/interactive-coding-challenges
1676ac16c987e35eeb4be6ab57a3c10ed9b71b8b
[ "Apache-2.0" ]
null
null
null
linked_lists/add_reverse/add_reverse_challenge.ipynb
hanbf/interactive-coding-challenges
1676ac16c987e35eeb4be6ab57a3c10ed9b71b8b
[ "Apache-2.0" ]
null
null
null
linked_lists/add_reverse/add_reverse_challenge.ipynb
hanbf/interactive-coding-challenges
1676ac16c987e35eeb4be6ab57a3c10ed9b71b8b
[ "Apache-2.0" ]
null
null
null
38.150602
1,406
0.542871
[ [ [ "This notebook was prepared by [Donne Martin](http://donnemartin.com). Source and license info is on [GitHub](https://github.com/donnemartin/interactive-coding-challenges).", "_____no_output_____" ], [ "# Challenge Notebook", "_____no_output_____" ], [ "## Problem: Add two numbers whose digits are stored in a linked list in reverse order.\n\n* [Constraints](#Constraints)\n* [Test Cases](#Test-Cases)\n* [Algorithm](#Algorithm)\n* [Code](#Code)\n* [Unit Test](#Unit-Test)\n* [Solution Notebook](#Solution-Notebook)", "_____no_output_____" ], [ "## Constraints\n\n* Can we assume this is a non-circular, singly linked list?\n * Yes\n* Do we expect the return to be in reverse order too?\n * Yes\n* What if one of the inputs is None?\n * Return None for an invalid operation\n* How large are these numbers--can they fit in memory?\n * Yes\n* Can we assume we already have a linked list class that can be used for this problem?\n * Yes\n* Can we assume this fits in memory?\n * Yes", "_____no_output_____" ], [ "## Test Cases\n\n* Empty list(s) -> None\n* Add values of different lengths\n * Input 1: 6->5->None\n * Input 2: 9->8->7\n * Result: 5->4->8\n* Add values of same lengths\n * Exercised from values of different lengths\n * Done here for completeness", "_____no_output_____" ], [ "## Algorithm\n\nRefer to the [Solution Notebook](http://nbviewer.ipython.org/github/donnemartin/interactive-coding-challenges/blob/master/linked_lists/add_reverse/add_reverse_solution.ipynb). 
If you are stuck and need a hint, the solution notebook's algorithm discussion might be a good place to start.", "_____no_output_____" ], [ "## Code", "_____no_output_____" ] ], [ [ "# %load ../linked_list/linked_list.py\nclass Node(object):\n\n def __init__(self, data, next=None):\n self.next = next\n self.data = data\n\n def __str__(self):\n return self.data\n\n\nclass LinkedList(object):\n\n def __init__(self, head=None):\n self.head = head\n\n def __len__(self):\n curr = self.head\n counter = 0\n while curr is not None:\n counter += 1\n curr = curr.next\n return counter\n\n def insert_to_front(self, data):\n if data is None:\n return None\n node = Node(data, self.head)\n self.head = node\n return node\n\n def append(self, data):\n if data is None:\n return None\n node = Node(data)\n if self.head is None:\n self.head = node\n return node\n curr_node = self.head\n while curr_node.next is not None:\n curr_node = curr_node.next\n curr_node.next = node\n return node\n\n def find(self, data):\n if data is None:\n return None\n curr_node = self.head\n while curr_node is not None:\n if curr_node.data == data:\n return curr_node\n curr_node = curr_node.next\n return None\n\n def delete(self, data):\n if data is None:\n return\n if self.head is None:\n return\n if self.head.data == data:\n self.head = self.head.next\n return\n prev_node = self.head\n curr_node = self.head.next\n while curr_node is not None:\n if curr_node.data == data:\n prev_node.next = curr_node.next\n return\n prev_node = curr_node\n curr_node = curr_node.next\n\n def delete_alt(self, data):\n if data is None:\n return\n if self.head is None:\n return\n curr_node = self.head\n if curr_node.data == data:\n curr_node = curr_node.next\n return\n while curr_node.next is not None:\n if curr_node.next.data == data:\n curr_node.next = curr_node.next.next\n return\n curr_node = curr_node.next\n\n def print_list(self):\n curr_node = self.head\n while curr_node is not None:\n print(curr_node.data)\n curr_node = 
curr_node.next\n\n def get_all_data(self):\n data = []\n curr_node = self.head\n while curr_node is not None:\n data.append(curr_node.data)\n curr_node = curr_node.next\n return data", "_____no_output_____" ], [ "class MyLinkedList(LinkedList):\n\n def add_reverse(self, first_list, second_list):\n # TODO: Implement me\n pass", "_____no_output_____" ] ], [ [ "## Unit Test", "_____no_output_____" ], [ "\n\n**The following unit test is expected to fail until you solve the challenge.**", "_____no_output_____" ] ], [ [ "# %load test_add_reverse.py\nfrom nose.tools import assert_equal\n\n\nclass TestAddReverse(object):\n\n def test_add_reverse(self):\n print('Test: Empty list(s)')\n assert_equal(MyLinkedList().add_reverse(None, None), None)\n assert_equal(MyLinkedList().add_reverse(Node(5), None), None)\n assert_equal(MyLinkedList().add_reverse(None, Node(10)), None)\n\n print('Test: Add values of different lengths')\n # Input 1: 6->5->None\n # Input 2: 9->8->7\n # Result: 5->4->8\n first_list = MyLinkedList(Node(6))\n first_list.append(5)\n second_list = MyLinkedList(Node(9))\n second_list.append(8)\n second_list.append(7)\n result = MyLinkedList().add_reverse(first_list, second_list)\n assert_equal(result.get_all_data(), [5, 4, 8])\n\n print('Test: Add values of same lengths')\n # Input 1: 6->5->4\n # Input 2: 9->8->7\n # Result: 5->4->2->1\n first_head = Node(6)\n first_list = MyLinkedList(first_head)\n first_list.append(5)\n first_list.append(4)\n second_head = Node(9)\n second_list = MyLinkedList(second_head)\n second_list.append(8)\n second_list.append(7)\n result = MyLinkedList().add_reverse(first_list, second_list)\n assert_equal(result.get_all_data(), [5, 4, 2, 1])\n\n print('Success: test_add_reverse')\n\n\ndef main():\n test = TestAddReverse()\n test.test_add_reverse()\n\n\nif __name__ == '__main__':\n main()", "Test: Empty list(s)\nTest: Add values of different lengths\n" ] ], [ [ "## Solution Notebook\n\nReview the [Solution 
Notebook](http://nbviewer.ipython.org/github/donnemartin/interactive-coding-challenges/blob/master/linked_lists/add_reverse/add_reverse_solution.ipynb) for a discussion on algorithms and code solutions.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ] ]
4aa5d70e723b03c0550e7242b160a92f90216aeb
8,636
ipynb
Jupyter Notebook
src/lab_2.ipynb
jafetimbre/optimus
ef57d07f79423b2021c549fe94112bdf408e54da
[ "MIT" ]
null
null
null
src/lab_2.ipynb
jafetimbre/optimus
ef57d07f79423b2021c549fe94112bdf408e54da
[ "MIT" ]
null
null
null
src/lab_2.ipynb
jafetimbre/optimus
ef57d07f79423b2021c549fe94112bdf408e54da
[ "MIT" ]
null
null
null
27.329114
226
0.388606
[ [ [ "<a href=\"https://colab.research.google.com/github/jafetimbre/optimus/blob/master/src/lab_2.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# Generatoare de liste", "_____no_output_____" ] ], [ [ "L = [x + y for x in 'abc' for y in 'lmnpq']\nprint(f'Primul generator de lista: {L}')\n\nL = [x + y for (x, y) in zip('abc', 'lmnpq')]\nprint(f'Al doilea generator de lista: {L}')", "Primul generator de lista: ['al', 'am', 'an', 'ap', 'aq', 'bl', 'bm', 'bn', 'bp', 'bq', 'cl', 'cm', 'cn', 'cp', 'cq']\nAl doilea generator de lista: ['al', 'bm', 'cn']\n" ], [ "M = [ [1,2,3], [4,5,6], [7,8,9]]\nm = [ [col + 10 for col in row] for row in M]\n\nprint(f'Prima expresie: {M}')\nprint(f'A doua expresie: {m}')", "Prima expresie: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]\nA doua expresie: [[11, 12, 13], [14, 15, 16], [17, 18, 19]]\n" ], [ "R = range(5)\nprint(type(R))\nL1, L2 = iter(R), iter(R)\nprint([next(L1), next(L1), next(L1)])\nprint([next(L2), next(L2)])", "<class 'range'>\n[0, 1, 2]\n[0, 1]\n" ], [ "import timeit\n\nexpr = '[abs(x) for x in range(1000)]\\nlist(abs(x) for x in range(1000))'\n\nprint(timeit.timeit(expr, number = 10000))\n\nprint(timeit.timeit('[abs(x) for x in range(1000)]', number = 10000))\nprint(timeit.timeit('list(abs(x) for x in range(1000))', number = 10000))\nprint(timeit.timeit('map(abs,range(1000))', number = 10000))\nprint(timeit.timeit('list(map(abs,range(1000)))', number = 10000))", "2.6679140999999618\n0.9788963000000876\n1.2854178000000047\n0.0040117999999438325\n0.4548700000000281\n" ] ], [ [ "# NumPy", "_____no_output_____" ] ], [ [ "import numpy as np\narr = np.random.normal(5, 0.5, size=(3, 3, 3))", "_____no_output_____" ], [ "arr = np.arange(1, 28).reshape(3,3,3)\nprint(arr)", "[[[ 1 2 3]\n [ 4 5 6]\n [ 7 8 9]]\n\n [[10 11 12]\n [13 14 15]\n [16 17 18]]\n\n [[19 20 21]\n [22 23 24]\n [25 26 27]]]\n" ], [ "print([arr[x, :, :] for x in 
range(3)])\nprint([arr[:, y, :] for y in range(3)]) \nprint([arr[:, :, z] for z in range(3)])", "[array([[1, 2, 3],\n [4, 5, 6],\n [7, 8, 9]]), array([[10, 11, 12],\n [13, 14, 15],\n [16, 17, 18]]), array([[19, 20, 21],\n [22, 23, 24],\n [25, 26, 27]])]\n[array([[ 1, 2, 3],\n [10, 11, 12],\n [19, 20, 21]]), array([[ 4, 5, 6],\n [13, 14, 15],\n [22, 23, 24]]), array([[ 7, 8, 9],\n [16, 17, 18],\n [25, 26, 27]])]\n[array([[ 1, 4, 7],\n [10, 13, 16],\n [19, 22, 25]]), array([[ 2, 5, 8],\n [11, 14, 17],\n [20, 23, 26]]), array([[ 3, 6, 9],\n [12, 15, 18],\n [21, 24, 27]])]\n" ] ], [ [ "## 3", "_____no_output_____" ] ], [ [ "mat = np.random.randint(10, size=(6,6))\nmat", "_____no_output_____" ], [ "import random\narr = np.array([random.random() for _ in range(10000)]) ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa5e0227340a3049df5dff5b101f6e4cb1287a2
108,150
ipynb
Jupyter Notebook
old_projects/quchem_ibm/Experiments/LiH_Simulation_result/LiH_Analysis_STANDARD.ipynb
AlexisRalli/VQE-code
4112d2bba4c327360e95dfd7cb6120b2ce67bf29
[ "MIT" ]
1
2021-04-01T14:01:46.000Z
2021-04-01T14:01:46.000Z
old_projects/quchem_ibm/Experiments/LiH_Simulation_result/LiH_Analysis_STANDARD.ipynb
AlexisRalli/VQE-code
4112d2bba4c327360e95dfd7cb6120b2ce67bf29
[ "MIT" ]
5
2019-11-13T16:23:54.000Z
2021-04-07T11:03:06.000Z
old_projects/quchem_ibm/Experiments/LiH_Simulation_result/LiH_Analysis_STANDARD.ipynb
AlexisRalli/VQE-code
4112d2bba4c327360e95dfd7cb6120b2ce67bf29
[ "MIT" ]
null
null
null
100.417827
21,924
0.844475
[ [ [ "import pickle\nimport os\nimport numpy as np\nfrom tqdm.notebook import tqdm", "_____no_output_____" ], [ "from quchem_ibm.exp_analysis import *\n\ndef dict_of_M_to_list(M_dict, PauliOP):\n \n P_Qubit_list, _ = zip(*(list(*PauliOP.terms.keys())))\n \n list_of_M_bitstrings=None\n for bit_string, N_obtained in M_dict.items():\n \n M_string = np.take(list(bit_string[::-1]), P_Qubit_list) # only take terms measured! Note bitstring reversed!\n \n array_meas = np.repeat(''.join(M_string), N_obtained)\n if list_of_M_bitstrings is None:\n list_of_M_bitstrings=array_meas\n else:\n list_of_M_bitstrings=np.hstack((list_of_M_bitstrings,array_meas))\n \n # randomly shuffle (seed means outcome will always be the SAME!)\n# np.random.seed(42) \n np.random.shuffle(list_of_M_bitstrings) \n \n return list_of_M_bitstrings", "_____no_output_____" ], [ "# # input for exp\nbase_dir = os.getcwd()\ninput_file = os.path.join(base_dir, 'LiH_simulation_RESULTS_time=2020Oct07-163210198971.pickle')\nwith open(input_file, 'rb') as handle:\n LiH_data = pickle.load(handle)\n", "_____no_output_____" ], [ "experimental_data_STANDARD = LiH_data['experiment_data'].copy()\ndel LiH_data", "_____no_output_____" ], [ "STANDARD_data = experimental_data_STANDARD[101852100]['standard'].copy()\ndel experimental_data_STANDARD", "_____no_output_____" ], [ "len(STANDARD_data)", "_____no_output_____" ], [ "STANDARD_Hist_data_sim={} \nfor exp_instance in STANDARD_data: #each exp repeated 10 times!\n for exp_dict_key in exp_instance:\n \n exp_dict= exp_instance[exp_dict_key]\n \n P=exp_dict['qubitOp']\n coeff = exp_dict['coeff']\n measured_dict_sim = exp_dict['measurement_dict']\n\n M_list_sim = dict_of_M_to_list(measured_dict_sim, P)\n\n if exp_dict_key in STANDARD_Hist_data_sim.keys():\n STANDARD_Hist_data_sim[exp_dict_key]={'P':list(P.terms.items())[0] ,'coeff': coeff.real, 'Measurements': np.hstack((STANDARD_Hist_data_sim[exp_dict_key]['Measurements'],M_list_sim))}\n else:\n 
STANDARD_Hist_data_sim[exp_dict_key]={'P':list(P.terms.items())[0] ,'coeff': coeff.real, 'Measurements': M_list_sim}\n \n del exp_dict", "_____no_output_____" ], [ "del STANDARD_data", "_____no_output_____" ], [ "# for key in STANDARD_Hist_data_sim:\n# STANDARD_Hist_data_sim[key]['Measurements']=STANDARD_Hist_data_sim[key]['Measurements'].tolist()\n# STANDARD_Hist_data_sim[key]['P']=(STANDARD_Hist_data_sim[key]['P'][0], STANDARD_Hist_data_sim[key]['P'][1].real)", "_____no_output_____" ], [ "# import json\n# with open(\"STANDARD_Hist_data_sim\", \"w\") as write_file:\n# json.dump(STANDARD_Hist_data_sim, write_file)\n", "_____no_output_____" ], [ "STANDARD_Hist_data_sim[0]['Measurements'].shape", "_____no_output_____" ], [ "# ### save output\n# np.save('Standard_hist_data', STANDARD_Hist_data_sim)", "_____no_output_____" ], [ "import matplotlib.pyplot as plt", "_____no_output_____" ], [ "fci_energy= -7.971184315565538", "_____no_output_____" ] ], [ [ "# Histogram", "_____no_output_____" ] ], [ [ "def Get_Hist_data(Histogram_data, I_term):\n E_list=[]\n for m_index in tqdm(range(Histogram_data[0]['Measurements'].shape[0])):\n E=I_term\n for M_dict_key in Histogram_data:\n coeff = Histogram_data[M_dict_key]['coeff']\n parity = 1 if sum(map(int, Histogram_data[M_dict_key]['Measurements'][m_index])) % 2 == 0 else -1\n E+=coeff*parity\n E_list.append(E)\n return E_list\n ", "_____no_output_____" ], [ "I_term = -4.142299396835105\nE_list_STANDARD_sim=Get_Hist_data(STANDARD_Hist_data_sim, I_term)", "100%|██████████| 1616700/1616700 [31:34<00:00, 853.19it/s]\n" ], [ "import json\nwith open(\"E_list_STANDARD_sim.json\", \"w\") as write_file:\n json.dump(E_list_STANDARD_sim, write_file)\n", "_____no_output_____" ], [ "E_list_STANDARD_sim=np.array(E_list_STANDARD_sim)", "_____no_output_____" ], [ "def gaussian(x, mean, amplitude, standard_deviation):\n return amplitude * np.exp( - ((x - mean)**2 / (2*standard_deviation**2)))", "_____no_output_____" ], [ "from scipy.optimize 
import curve_fit", "_____no_output_____" ], [ "# from matplotlib import pyplot\n# %matplotlib inline\n\n\n# # bins_standard = len(set(E_list_STANDARD_sim))\n# bins_standard = 1000\n\n# bin_heights_STANDARD, bin_borders_STANDARD, _=pyplot.hist(E_list_STANDARD_sim,\n# bins_standard, alpha=0.7,\n# label='$E$ standard VQE - sim',\n# color='g',\n# density=False)\n\n# bin_centers_STANDARD = bin_borders_STANDARD[:-1] + np.diff(bin_borders_STANDARD) / 2\n# popt, _ = curve_fit(gaussian, bin_centers_STANDARD, bin_heights_STANDARD, p0=[fci_energy, 0., 1.], **{'maxfev':10000})\n# mean_STANDARD, amplitude_STANDARD, standard_deviation_STANDARD= popt\n# x_interval_for_fit = np.linspace(bin_borders_STANDARD[0], bin_borders_STANDARD[-1], 10000)\n# pyplot.plot(x_interval_for_fit, gaussian(x_interval_for_fit, *popt), label='Gaussian fit', color='g')\n\n\n# pyplot.axvline(mean_STANDARD, color='g', linestyle='dashed', linewidth=1, \n# label='$E_{average}$ standard VQE - sim') # mean of GAUSSIAN FIT\n\n# # pyplot.axvline(E_list_STANDARD_sim.mean(), color='g', linestyle='dashed', linewidth=1, \n# # label='$E_{average}$ standard VQE - sim') # mean of DATA\n\n# pyplot.errorbar(mean_STANDARD,65_000,\n# xerr=standard_deviation_STANDARD, linestyle=\"None\", color='g',\n# uplims=True, lolims=True, label='$\\sigma_{E_{av}}$standard VQE - sim')\n\n\n\n\n# pyplot.axvline(fci_energy, color='k', linestyle='solid', linewidth=2, \n# label='$E_{FCI}$', alpha=0.4)\n\n# pyplot.legend(loc='upper right')\n# # pyplot.legend(bbox_to_anchor=(0.865,1.9), loc=\"upper left\")\n# pyplot.ylabel('Frequency')\n# pyplot.xlabel('Energy')\n\n# pyplot.tight_layout()\n\n# file_name = 'LiH_Histogram_STANDARD_sim_Gaussian.jpeg'\n# pyplot.savefig(file_name, dpi=300,transparent=True,) # edgecolor='black', facecolor='white')\n# pyplot.show()", "_____no_output_____" ], [ "def normal_dist(x, mean, standard_deviation):\n return (1/(np.sqrt(2*np.pi)*standard_deviation)) * np.exp( - ((x - mean)**2 / 
(2*standard_deviation**2)))\n\nplt.plot(x, normal_dist(x, av, sig))", "_____no_output_____" ], [ "# from scipy.stats import norm\n\n# x=np.linspace(-10, 10, 1000)\n# av=2\n# sig=1\n# plt.plot(x, norm.pdf(x, av, sig))", "_____no_output_____" ], [ "len(set(np.around(E_list_STANDARD_sim, 5)))", "_____no_output_____" ], [ "E_list_STANDARD_sim.shape", "_____no_output_____" ], [ " E_list_STANDARD_sim.shape[0]**(1/3)", "_____no_output_____" ], [ "# https://stats.stackexchange.com/questions/798/calculating-optimal-number-of-bins-in-a-histogram\nfrom scipy.stats import iqr\n\nbin_width = 2 * iqr(E_list_STANDARD_sim) / E_list_STANDARD_sim.shape[0]**(1/3)\nnp.ceil((max(E_list_STANDARD_sim)-min(E_list_STANDARD_sim))/bin_width)", "_____no_output_____" ], [ "from matplotlib import pyplot\n%matplotlib inline\n\n\n# bins = len(set(E_list_SEQ_ROT_sim))\n# bins_standard = len(set(E_list_STANDARD_sim))\n\n# bins_standard = 150_000\nbins_standard = 2500\n\nbin_heights_STANDARD, bin_borders_STANDARD, _=pyplot.hist(E_list_STANDARD_sim,\n bins_standard, alpha=0.7,\n label='$E$ standard VQE - sim',\n color='g',\n density=True)\n#### ,hatch='-')\n\n###### Gaussian fit\nbin_centers_STANDARD = bin_borders_STANDARD[:-1] + np.diff(bin_borders_STANDARD) / 2\npopt, _ = curve_fit(gaussian, bin_centers_STANDARD, bin_heights_STANDARD, p0=[fci_energy, 0., 1.])#, **{'maxfev':10000})\nmean_STANDARD, amplitude_STANDARD, standard_deviation_STANDARD= popt\nx_interval_for_fit = np.linspace(bin_borders_STANDARD[0], bin_borders_STANDARD[-1], 10000)\npyplot.plot(x_interval_for_fit, gaussian(x_interval_for_fit, *popt), label='Gaussian fit', color='olive',\n linewidth=3)\n\n\n\n\n### normal fit\n# popt_norm, _ = curve_fit(normal_dist, bin_centers_STANDARD, bin_heights_STANDARD, p0=[fci_energy, standard_deviation_STANDARD])#, **{'maxfev':10000})\n# mean_norm, standard_deviation_norm= popt_norm\n# pyplot.plot(x_interval_for_fit, normal_dist(x_interval_for_fit, *popt_norm), label='Normal fit', color='b',\n# 
linestyle='--')\n\n# pyplot.plot(x_interval_for_fit, normal_dist(x_interval_for_fit, mean_STANDARD, standard_deviation_STANDARD), \n# label='Normal fit', color='b', linestyle='--')\n\n\n\n\n\n#### Average energy from data\npyplot.axvline(E_list_STANDARD_sim.mean(), color='g', linestyle='--', linewidth=2, \n label='$E_{average}$ standard VQE - sim') # mean of DATA\n\n\n\n##############\n\n# chemical accuracy\npyplot.axvline(fci_energy, color='k', linestyle='solid', linewidth=3, \n label='$E_{FCI}$', alpha=0.3)\n\n# # chemical accuracy\n# pyplot.fill_between([fci_energy-1.6e-3, fci_energy+1.6e-3],\n# [0, np.ceil(max(bin_heights_STANDARD))] ,\n# color='k', \n# label='chemical accuracy',\n# alpha=0.5)\n\n\npyplot.rcParams[\"font.family\"] = \"Times New Roman\"\n# pyplot.legend(loc='upper right')\n# # pyplot.legend(bbox_to_anchor=(0.865,1.9), loc=\"upper left\")\npyplot.ylabel('Probability Density', fontsize=20)\npyplot.xlabel('Energy / Hartree', fontsize=20)\npyplot.xticks(np.arange(-9.5,-5.5,0.5), fontsize=20)\npyplot.yticks(np.arange(0,2.5,0.5), fontsize=20)\n\n# pyplot.xlim(np.floor(min(bin_borders_STANDARD)), np.ceil(max(bin_borders_STANDARD)))\npyplot.xlim(-9.5, -6.5)\n\n\npyplot.tight_layout()\n\nfile_name = 'LiH_Histogram_STANDARD_sim_Gaussian.jpeg'\npyplot.savefig(file_name, dpi=300,transparent=True,) # edgecolor='black', facecolor='white')\npyplot.show()", "_____no_output_____" ], [ "from matplotlib import pyplot\n%matplotlib inline\n\n\n# bins = len(set(E_list_SEQ_ROT_sim))\n# bins_standard = len(set(E_list_STANDARD_sim))\n\n# bins_standard = 5000\nbins_standard = 150_000\n\nbin_heights_STANDARD, bin_borders_STANDARD, _=pyplot.hist(E_list_STANDARD_sim,\n bins_standard, alpha=0.7,\n label='$E$ standard VQE - sim',\n color='g',\n density=True)\n\n\n\n##############\n\n\npyplot.rcParams[\"font.family\"] = \"Times New Roman\"\n# pyplot.legend(loc='upper right')\n# # pyplot.legend(bbox_to_anchor=(0.865,1.9), loc=\"upper left\")\npyplot.ylabel('Probability 
Density', fontsize=20)\npyplot.xlabel('Energy / Hartree', fontsize=20)\npyplot.xticks(np.arange(-9.5,-5.5,0.5), fontsize=20)\npyplot.yticks(np.arange(0,3,0.5), fontsize=20)\n\n# pyplot.xlim(np.floor(min(bin_borders_STANDARD)), np.ceil(max(bin_borders_STANDARD)))\npyplot.xlim(-9.5, -6.5)\n\n\npyplot.tight_layout()\n\n# file_name = 'LiH_Histogram_STANDARD_sim_Gaussian.jpeg'\n# pyplot.savefig(file_name, dpi=300,transparent=True,) # edgecolor='black', facecolor='white')\npyplot.show()", "_____no_output_____" ], [ "from scipy import stats", "_____no_output_____" ], [ "print(stats.shapiro(E_list_STANDARD_sim))\nprint(stats.kstest(E_list_STANDARD_sim, 'norm'))", "/Users/lex/anaconda3/envs/UpdatedCirq/lib/python3.7/site-packages/scipy/stats/morestats.py:1676: UserWarning: p-value may not be accurate for N > 5000.\n warnings.warn(\"p-value may not be accurate for N > 5000.\")\n" ] ], [ [ "# XY Z comparison", "_____no_output_____" ] ], [ [ "i_list_XY=[]\nSTANDARD_Hist_data_XY={}\n\ni_list_Z=[]\nSTANDARD_Hist_data_Z={}\namplitude_min=0.00\nXY_terms=[]\nZ_amp_sum=0\n\nfor key in STANDARD_Hist_data_sim:\n Pword, const = STANDARD_Hist_data_sim[key]['P']\n coeff=STANDARD_Hist_data_sim[key]['coeff']\n \n if np.abs(coeff)>amplitude_min:\n qubitNos, qubitPstrs = zip(*(list(Pword)))\n # XY terms only!\n if ('X' in qubitPstrs) or ('Y' in qubitPstrs):\n i_list_XY.append(key)\n STANDARD_Hist_data_XY[key]=STANDARD_Hist_data_sim[key]\n XY_terms.append(STANDARD_Hist_data_sim[key]['P'])\n else:\n i_list_Z.append(key)\n STANDARD_Hist_data_Z[key]=STANDARD_Hist_data_sim[key]\n Z_amp_sum+=coeff\nZ_amp_sum", "_____no_output_____" ], [ "def Get_Hist_data(Histogram_data, I_term):\n E_list=[]\n for m_index in tqdm(range(Histogram_data[list(Histogram_data.keys())[0]]['Measurements'].shape[0])):\n E=I_term\n for M_dict_key in Histogram_data:\n coeff = Histogram_data[M_dict_key]['coeff']\n parity = 1 if sum(map(int, Histogram_data[M_dict_key]['Measurements'][m_index])) % 2 == 0 else -1\n 
E+=coeff*parity\n E_list.append(E)\n return E_list\n ", "_____no_output_____" ], [ "I_term = -4.142299396835105\n\nE_list_STANDARD_XY=Get_Hist_data(STANDARD_Hist_data_XY, 0)\nE_list_STANDARD_Z=Get_Hist_data(STANDARD_Hist_data_Z, 0)", "100%|██████████| 1616700/1616700 [28:48<00:00, 935.30it/s] \n100%|██████████| 1616700/1616700 [03:34<00:00, 7529.63it/s]\n" ], [ "print(len(set(np.around(E_list_STANDARD_XY, 5))))\nprint(len(set(np.around(E_list_STANDARD_Z, 5))))", "106136\n9536\n" ], [ "from matplotlib import pyplot\n%matplotlib inline\n\n\n# bins_standard = len(set(E_list_STANDARD_sim))\n# bins_standard = 1000\nbins_standard=8_000\n\n# bin_heights_XY, bin_borders_XY, _=pyplot.hist(E_list_STANDARD_XY,\n# bins_standard, alpha=0.7,\n# label='$XY$ terms',\n# color='b',\n# density=False)\n\nbin_heights_Z, bin_borders_Z, _=pyplot.hist(E_list_STANDARD_Z,\n bins_standard, alpha=0.7,\n label='$Z$ terms',\n color='g',\n density=True)\n\npyplot.rcParams[\"font.family\"] = \"Times New Roman\"\npyplot.ylabel('Probability Density', fontsize=20)\npyplot.xlabel('Energy / Hartree', fontsize=20)\npyplot.xticks(np.arange(-4.2,-3.0,0.2), fontsize=20)\npyplot.xlim((-4.2, -3.2)) \npyplot.yticks(np.arange(0,1200,200), fontsize=20)\npyplot.ylim((0, 1000)) \n\npyplot.tight_layout()\n\nfile_name = 'LiH_standard_Z.jpeg'\npyplot.savefig(file_name, dpi=300,transparent=True,) # edgecolor='black', facecolor='white')\npyplot.show()", "_____no_output_____" ], [ "np.where(bin_heights_Z==max(bin_heights_Z))[0]", "_____no_output_____" ], [ "print(bin_heights_Z[2334])\nprint('left sum:',sum(bin_heights_Z[:2334]))\nprint('right sum:', sum(bin_heights_Z[2335:]))\n# therefore slighlt more likely to get more +ve energy!!!", "951.3579852420173\nleft sum: 504.8055111080494\nright sum: 518.2528410337952\n" ], [ "bin_borders_Z[583]", "_____no_output_____" ], [ "print(len(np.where(np.array(E_list_STANDARD_Z)>-3.8)[0]))\nprint(len(np.where(np.array(E_list_STANDARD_Z)<-3.89)[0]))\nlen(E_list_STANDARD_Z)", 
"411283\n372029\n" ], [ "from matplotlib import pyplot\n%matplotlib inline\n\n\n# bins_standard = len(set(E_list_STANDARD_sim))\n# bins_standard = 1000\nbins_standard = 5000\n\nbin_heights_XY, bin_borders_XY, _=pyplot.hist(E_list_STANDARD_XY,\n bins_standard, alpha=0.7,\n label='$XY$ terms',\n color='g',\n density=True)\n\npyplot.rcParams[\"font.family\"] = \"Times New Roman\"\npyplot.ylabel('Probability Density', fontsize=20)\npyplot.xlabel('Energy / Hartree', fontsize=20)\npyplot.xticks(np.arange(-0.8,0.9,0.2), fontsize=20)\npyplot.xlim((-0.8, 0.8)) \npyplot.yticks(np.arange(0,3,0.5), fontsize=20)\n\n\npyplot.tight_layout()\n\nfile_name = 'LiH_standard_XY.jpeg'\npyplot.savefig(file_name, dpi=300,transparent=True,) # edgecolor='black', facecolor='white')\npyplot.show()", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa5f127f2b142394a0a08c8595381aa57438665
76,772
ipynb
Jupyter Notebook
docs/notebooks/base/esercizi/esercizi_18_b.ipynb
anhelus/python-data-science
12f291986afba3d58fc1a36e16e9931dadf77952
[ "MIT" ]
null
null
null
docs/notebooks/base/esercizi/esercizi_18_b.ipynb
anhelus/python-data-science
12f291986afba3d58fc1a36e16e9931dadf77952
[ "MIT" ]
null
null
null
docs/notebooks/base/esercizi/esercizi_18_b.ipynb
anhelus/python-data-science
12f291986afba3d58fc1a36e16e9931dadf77952
[ "MIT" ]
null
null
null
52.946207
10,296
0.650419
[ [ [ "L'obiettivo di questa esercitazione è quello di arrivare ad implementare un sistema completo di classificazione dei sopravvissuti al disastro del Titanic. Per farlo, partiremo dall'omonimo dataset, faremo un'analisi completa dello stesso, e cercheremo di raggiungere il miglior risultato possibile in termini di accuracy.", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport warnings\nwarnings.filterwarnings('ignore')\n\nfrom sklearn.compose import ColumnTransformer\nfrom sklearn.datasets import fetch_openml\nfrom sklearn.feature_selection import VarianceThreshold\nfrom sklearn.model_selection import GridSearchCV\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.preprocessing import OneHotEncoder\nfrom sklearn.tree import DecisionTreeClassifier", "_____no_output_____" ] ], [ [ "## Parte 1: estrazione dei dati\n\nFinora, abbiamo sempre usato la funzione `read_csv` di Pandas per la lettura di un dataset. Scikit Learn, però, offre la funzione [`fetch_openml`](http://scikit-learn.org/stable/modules/generated/sklearn.datasets.fetch_openml.html), che permette di estrarre un dataframe da [OpenML](https://www.openml.org/), nota repository online dalla quale è possibile reperire numerosi dataset.\n\nProviamo quindi ad estrarre i dati usando proprio questa funzione.\n\n> **Suggerimento**: la funzione `fetch_openml` restituisce un oggetto. 
Esploriamolo, assieme alla documentazione, per estrarre il dataframe.", "_____no_output_____" ] ], [ [ "data = fetch_openml(\"titanic\", version=1, as_frame=True)", "_____no_output_____" ], [ "df = data.frame\ndf.head()", "_____no_output_____" ] ], [ [ "## Parte 2: Exploratory data analysis\n\nCome abbiamo visto, è sempre opportuno \"esplorare\" i dati a nostra disposizione.\n\n### Parte 2.1: Tipologia di feature e preprocessing\n\nPer prima cosa, quindi, osserviamoli, guardando i primi cinque campioni, e valutiamo il tipo delle feature che stiamo utilizzando.", "_____no_output_____" ] ], [ [ "df.head()", "_____no_output_____" ], [ "df.dtypes", "_____no_output_____" ] ], [ [ "Notiamo subito che ci sono dei `NaN` e dei `None` relativi a diverse feature.\n\n> **Suggerimento**: `NaN` e `None` *non* sono analoghi. Entrambi indicano la mancanza di dati, ma `None` implica la presenza di un oggetto, mentre `NaN` quella di un valore numerico. In tal senso, a [questo indirizzo](https://stackoverflow.com/questions/17534106/what-is-the-difference-between-nan-and-none) potete trovare un'interessante disquisizione.\n\nAbbiamo due possibilità: la prima è quella di eliminare i campioni che presentano dati mancanti, la seconda è quella di eliminare le feature che presentano tali valori. Scriviamo la funzione `drop_nan` che elimini una feature qualora il numero di dati mancanti sia superiore al 25% del totale e che, una volta terminata questa operazione, provveda ad eliminare i campioni che presentano una o più feature con dati mancanti.", "_____no_output_____" ] ], [ [ "def drop_nan(df):\n threshold = round(len(df) / 4)\n df.dropna(axis=1, inplace=True, thresh=threshold)\n df.dropna(axis=0, inplace=True)", "_____no_output_____" ] ], [ [ "Prima di applicare la funzione `drop_nan`, però, eliminiamo le feature che non reputiamo significative ai fini della nostra analisi. 
In particolare, potremmo eliminare feature come il nome o il numero di ticket.\n\nPer quello che riguarda i valori a `None`, questi sono indicativi del fatto che il passeggero non è stato imbarcato su alcuna scialuppa di salvataggio. Assegnamo uno zero a tutti i dati che assumono valore `None` mediante la funzione [`apply`](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.apply.html).\n\n> **Nota**\n>\n> La funzione `apply` prevede l'utilizzo delle [*lambda functions*](https://docs.python.org/3/tutorial/controlflow.html). Per brevità, queste funzioni hanno una sintassi di questo tipo:\n> ```python\n> lambda x: f(x)\n> ```\n> Ciò significa che sarà applicata `f(x)` ad ogni valore di `x`. Così, ad esempio:\n> ```python\n> df = pd.DataFrame([1, 2, 3])\n> df = df.apply(lambda x: x**2)\n> # Risultato: pd.DataFrame([1, 4, 9])\n> ```\n", "_____no_output_____" ] ], [ [ "df.drop(['name', 'ticket'], axis=1, inplace=True)\ndf['boat'] = df['boat'].apply(lambda x: '0' if x is None else x)\ndrop_nan(df)", "_____no_output_____" ] ], [ [ "Vediamo adesso il dataframe risultante.", "_____no_output_____" ] ], [ [ "df.head()", "_____no_output_____" ], [ "df.dtypes", "_____no_output_____" ] ], [ [ "Nel dataframe, sono rimaste dieci feature, rispetto alle iniziali 14. Notiamo anche che abbiamo alcune feature che possiamo contrassegnare come *categorical*, ovvero:\n\n* `sex`\n* `embarked`\n* `boat`\n* `home.dest`\n\n> **Nota**: `boat` è una feature categorica, in quanto alcune delle scialuppe di salvataggio erano contrassegnate da valori alfanumerici (ad esempio, `D`) e non da semplici cifre.\n\n### Parte 2.2: Esplorazione dei dati\n\nUsiamo adesso congiuntamente gli strumenti degli *istogrammi* e degli *scatter plot* per esplorare visivamente le singole feature. 
Partiamo dall'età.", "_____no_output_____" ] ], [ [ "df.hist(\n column='age',\n grid=False\n)\nplt.show()", "_____no_output_____" ] ], [ [ "Come possiamo vedere, l'età ricorda una distribuzione di Rayleigh. Ci attendiamo lo stesso per un'altra dimensione che possiamo esplorare, ovvero quella delle tariffe pagate dai singoli passeggeri.", "_____no_output_____" ] ], [ [ "df.hist(\n column='fare',\n grid=False,\n bins=100\n)\nplt.show()", "_____no_output_____" ] ], [ [ "Vediamo come si dispongono le due feature usando uno scatter plot.", "_____no_output_____" ] ], [ [ "df.boxplot(['age', 'fare'])\nplt.show()", "_____no_output_____" ], [ "df.plot.scatter(x='survived', y='age', c='survived', cmap='inferno')\nplt.show()", "_____no_output_____" ] ], [ [ "Notiamo come la variabilità dell'età è sufficiente, mentre quella del ticket è bassa. Potremmo quindi provare ad usare una tecnica di feature selection basata su `VarianceThreshold`.\n\nIn ultimo, valutiamo la matrice di correlazione. Usiamo l'indice di correlazione di Kendall, che risulta essere maggiormente robusto rispetto a quello di Pearson ed a quello di Spearman. 
Per approfondire, ecco un [eccellente punto di partenza](https://datascience.stackexchange.com/a/64261) su Stack Exchange.", "_____no_output_____" ] ], [ [ "df.corr(method='kendall')", "_____no_output_____" ], [ "df['survived'] = df['survived'].apply(lambda x: int(x))", "_____no_output_____" ], [ "df.head()", "_____no_output_____" ] ], [ [ "E' facile vedere come i risultati ci conducano alla conclusione che non vi sono feature fortemente correlate od anticorrelate; la correlazione maggiore che è possibile riscontrare è infatti tra classe del passeggero e tariffa (ed è negativa, come prevedibile: ciò significa che passeggeri con classe numerica più alta, ovvero terza, hanno pagato meno rispetto a passeggeri con classe numerica più bassa, ovvero seconda e prima).\n\n## Parte 3: pipeline di machine learning\n\nPossiamo adesso passare a creare due pipeline per il machine learning. \n\nUseremo in tal senso due classificatori: il primo sarà basato su alberi decisionali, mentre il secondo sarà un *multi-layer perceptron*, modellato grazie alla classe [`MPLClassifier`](http://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPClassifier.html).\n\nPrima di continuare, però, isoliamo le label del database. 
Per farlo, usiamo la funzione [`pop`](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.pop.html):", "_____no_output_____" ] ], [ [ "y = df.pop('survived')", "_____no_output_____" ], [ "df.head()", "_____no_output_____" ] ], [ [ "Definiamo ora un `ColumnTransformer` che codifichi le feature in precedenza indicate come categorical:", "_____no_output_____" ] ], [ [ "ct = ColumnTransformer(\n [('sex_tr', OneHotEncoder(handle_unknown='ignore'), ['sex']),\n ('embarked_tr', OneHotEncoder(handle_unknown='ignore'), ['embarked']),\n ('boat_tr', OneHotEncoder(handle_unknown='ignore'), ['boat']),\n ('home.dest_tr', OneHotEncoder(handle_unknown='ignore'), ['home.dest'])],\n remainder='passthrough')\n\nX = ct.fit_transform(df)", "_____no_output_____" ], [ "print(X[0, :])", " (0, 0)\t1.0\n (0, 4)\t1.0\n (0, 15)\t1.0\n (0, 321)\t1.0\n (0, 378)\t1.0\n (0, 379)\t29.0\n (0, 382)\t211.3375\n" ] ], [ [ "Possiamo ora definire le nostre due processing pipeline, una per ognuno dei possibili classificatori.", "_____no_output_____" ] ], [ [ "dt_pipeline = Pipeline([\n ('feat_sel', VarianceThreshold(.8 * 1 - .8)),\n ('dt', DecisionTreeClassifier(random_state=42))\n])\n\nmlp_pipeline = Pipeline([\n ('feat_sel', VarianceThreshold(.8 * 1 - .8)),\n ('mlp', MLPClassifier(random_state=42))\n])", "_____no_output_____" ] ], [ [ "Applichiamo la `GridSearchCV` su ognuna delle due pipeline. 
Dato che queste offrono un'interfaccia analoga a quella dei classici stimatori, potremo usare la stessa modalità vista in precedenza sui regressori; l'unica accortezza starà nello specificare a quale step della pipeline si riferiscono i parametri indicati nella griglia, usando una notazione:\n\n```python\nstep_name__param_name\n```", "_____no_output_____" ] ], [ [ "dt_params = {\n 'dt__max_depth': list(range(1, 11)),\n 'dt__criterion': ['gini', 'entropy'],\n}\n\nmlp_params = {\n 'mlp__hidden_layer_sizes': [50, 100, 150, 200],\n 'mlp__activation': ['logistic', 'tanh', 'relu'],\n 'mlp__solver': ['sgd', 'adam'],\n 'mlp__learning_rate': ['constant', 'adaptive'],\n}\n\ndt_search = GridSearchCV(dt_pipeline, dt_params)\ndt_search = dt_search.fit(X, y)\n\nmlp_search = GridSearchCV(mlp_pipeline, mlp_params)\nmlp_search = mlp_search.fit(X, y)", "_____no_output_____" ] ], [ [ "Vediamo quali sono i migliori punteggi ottenuti da entrambe le pipeline.", "_____no_output_____" ] ], [ [ "print('Accuracy per la pipeline con albero decisionale: ~{}%'.format(\n round(dt_search.best_score_ * 100)))\nprint('Accuracy per la pipeline con MLP: ~{}%'.format(\n round(mlp_search.best_score_ * 100)))", "Accuracy per la pipeline con albero decisionale: ~98%\nAccuracy per la pipeline con MLP: ~96%\n" ] ], [ [ "Ovviamente, potremo usare in inferenza la pipeline addestrata esattamente come uno stimatore mediante il metodo `predict`.", "_____no_output_____" ], [ "## Note finali\n\nScegliere tra un gran numero di stimatori può essere un'operazione abbastanza onerosa. Per questo, esiste un'intera branca del machine learning, chiamata *AutoML*, che si occupa di automatizzare la scelta, rendendo il processo trasparente all'utente. \n\nIn tal senso, un tool per l'AutoML basato su Scikit Learn è [AutoSKLearn](https://github.com/automl/auto-sklearn). 
Questo è, al momento, disponibile soltanto per macchine non Windows; tuttavia, il consiglio è quello di darci un'occhiata, se possibile.\n\nUn altro tool molto interessante (ma purtroppo meno \"aggiornato\" di AutoSKLearn) è [LazyPredict](https://lazypredict.readthedocs.io/en/latest/readme.html).", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
4aa5f78ab995b7654ec373b26aaaadb26e01f8e3
105,328
ipynb
Jupyter Notebook
Udemy Course/T2-3 Data cleaning, agrupacion de datos.ipynb
nestorsgarzonc/Fundamentals-of-Machine-learning
f29a34acbbd47829fc37ac5b0caa70f3a08332c3
[ "MIT" ]
null
null
null
Udemy Course/T2-3 Data cleaning, agrupacion de datos.ipynb
nestorsgarzonc/Fundamentals-of-Machine-learning
f29a34acbbd47829fc37ac5b0caa70f3a08332c3
[ "MIT" ]
7
2019-12-16T22:20:24.000Z
2022-02-10T01:25:04.000Z
Udemy Course/T2-3 Data cleaning, agrupacion de datos.ipynb
nestorsgarzonc/Fundamentals-of-Machine-learning
f29a34acbbd47829fc37ac5b0caa70f3a08332c3
[ "MIT" ]
null
null
null
34.992691
4,564
0.405486
[ [ [ "import numpy as np\nimport pandas as pd", "_____no_output_____" ], [ "gender=['Male', 'Female']\nincome=['Poor', 'Middle Class', 'Rich']", "_____no_output_____" ], [ "gender_data=[]\nincome_data=[]\nn=500\n\nfor i in range(n):\n gender_data.append(np.random.choice(gender))\n income_data.append(np.random.choice(income))", "_____no_output_____" ], [ "gender_data[:10]", "_____no_output_____" ], [ "income_data[:10]", "_____no_output_____" ], [ "#Z=(0,1) media y desviacion\n#N(m,s)=>m+s*Z\nheight=160+30*np.random.randn(n)\nweight=65+25*np.random.randn(n)\nage=30 +12*np.random.randn(n)\nincome=18000 +3500*np.random.randn(n)", "_____no_output_____" ], [ "data=pd.DataFrame(\n {\n \"Gender\":gender_data,\n \"Economic Status\":income_data,\n \"Height\":height,\n \"Weight\":weight,\n \"Age\":age,\n \"Income\":income\n }\n)", "_____no_output_____" ], [ "data.head()", "_____no_output_____" ] ], [ [ "# Agrupcion de datos", "_____no_output_____" ] ], [ [ "grouped_gender=data.groupby('Gender')", "_____no_output_____" ], [ "grouped_gender.groups", "_____no_output_____" ], [ "for names, groups in grouped_gender:\n print(names)\n print(groups)", "Female\n Gender Economic Status Height Weight Age Income\n0 Female Middle Class 166.769375 60.997846 31.763117 21261.887722\n3 Female Middle Class 108.075794 68.858798 40.666406 19620.983038\n7 Female Rich 131.796620 20.434221 23.574353 22004.665963\n10 Female Middle Class 131.541587 29.241717 33.267409 21219.865598\n16 Female Poor 181.699658 73.961060 23.467515 22803.997345\n.. ... ... ... ... ... 
...\n493 Female Poor 184.332419 54.456678 12.233594 12502.685898\n494 Female Rich 191.908022 69.640775 14.186962 16507.832528\n497 Female Poor 217.676918 33.330039 22.584839 18283.457438\n498 Female Poor 162.049647 95.194632 60.416840 17937.286993\n499 Female Middle Class 172.124481 94.630737 19.122712 20431.283882\n\n[251 rows x 6 columns]\nMale\n Gender Economic Status Height Weight Age Income\n1 Male Poor 154.412583 52.239964 35.396019 21970.535592\n2 Male Rich 150.241907 68.440701 5.672197 18955.983950\n4 Male Rich 140.166396 66.890672 31.388991 13194.729458\n5 Male Rich 216.145735 87.137793 22.471955 22514.029957\n6 Male Rich 170.904528 68.252848 35.244927 19181.003451\n.. ... ... ... ... ... ...\n490 Male Poor 131.542608 79.531191 37.520920 11928.574296\n491 Male Rich 152.007980 54.189376 34.807762 17882.817899\n492 Male Rich 173.423295 44.023754 43.778252 19508.450080\n495 Male Poor 191.736704 81.435678 29.239686 9551.920614\n496 Male Poor 158.329829 63.806982 29.048178 16518.792538\n\n[249 rows x 6 columns]\n" ], [ "grouped_gender.get_group('Female')", "_____no_output_____" ], [ "double_group=data.groupby(['Gender', 'Economic Status'])", "_____no_output_____" ], [ "len(double_group)", "_____no_output_____" ], [ "for names, groups in double_group:\n print(names)\n print(groups)", "('Female', 'Middle Class')\n Gender Economic Status Height Weight Age Income\n0 Female Middle Class 166.769375 60.997846 31.763117 21261.887722\n3 Female Middle Class 108.075794 68.858798 40.666406 19620.983038\n10 Female Middle Class 131.541587 29.241717 33.267409 21219.865598\n37 Female Middle Class 159.158069 58.148123 34.761774 19068.096422\n48 Female Middle Class 174.801327 33.875147 52.437361 17353.594275\n.. ... ... ... ... ... 
...\n469 Female Middle Class 155.412415 56.545468 33.208929 16908.909690\n473 Female Middle Class 171.735710 56.512319 27.333256 14664.314270\n474 Female Middle Class 138.695887 90.674176 45.746046 16738.255822\n484 Female Middle Class 170.464735 31.967694 55.943703 15660.068354\n499 Female Middle Class 172.124481 94.630737 19.122712 20431.283882\n\n[79 rows x 6 columns]\n('Female', 'Poor')\n Gender Economic Status Height Weight Age Income\n16 Female Poor 181.699658 73.961060 23.467515 22803.997345\n32 Female Poor 121.115402 83.648165 23.712510 19329.982682\n39 Female Poor 176.063697 47.450089 18.598977 21172.775870\n46 Female Poor 227.658866 64.958668 20.130136 22305.174032\n56 Female Poor 156.032399 -5.912760 42.111948 19080.847754\n.. ... ... ... ... ... ...\n477 Female Poor 149.923040 104.038230 29.784501 23780.329464\n482 Female Poor 190.484022 51.029483 44.131594 15894.378659\n493 Female Poor 184.332419 54.456678 12.233594 12502.685898\n497 Female Poor 217.676918 33.330039 22.584839 18283.457438\n498 Female Poor 162.049647 95.194632 60.416840 17937.286993\n\n[78 rows x 6 columns]\n('Female', 'Rich')\n Gender Economic Status Height Weight Age Income\n7 Female Rich 131.796620 20.434221 23.574353 22004.665963\n19 Female Rich 142.358772 29.052818 37.332879 20349.277205\n21 Female Rich 152.457545 70.799987 41.428284 14337.778892\n25 Female Rich 172.470713 71.629252 37.781757 16599.585382\n31 Female Rich 169.740349 62.988597 13.920572 13545.170144\n.. ... ... ... ... ... 
...\n459 Female Rich 141.908070 78.651475 27.120336 21151.349662\n467 Female Rich 145.907587 29.731743 35.477216 19418.372011\n476 Female Rich 155.324912 36.971553 27.266801 16358.831485\n488 Female Rich 170.186851 59.117646 35.510264 16719.972843\n494 Female Rich 191.908022 69.640775 14.186962 16507.832528\n\n[94 rows x 6 columns]\n('Male', 'Middle Class')\n Gender Economic Status Height Weight Age Income\n9 Male Middle Class 171.329585 77.361249 19.889561 13591.123060\n11 Male Middle Class 83.971664 72.043813 20.349259 24342.247899\n12 Male Middle Class 95.854712 64.740584 37.773970 22809.408354\n13 Male Middle Class 146.311864 56.818258 25.429957 18060.467998\n20 Male Middle Class 125.665523 81.234757 24.639822 18661.122522\n.. ... ... ... ... ... ...\n452 Male Middle Class 187.893968 90.542092 7.456949 18170.800657\n464 Male Middle Class 194.764400 88.443561 31.429025 15484.172381\n465 Male Middle Class 132.367847 33.937705 6.458266 16480.349020\n471 Male Middle Class 171.139279 77.358066 34.834564 15901.547934\n481 Male Middle Class 191.028579 102.442087 22.636027 14948.893546\n\n[88 rows x 6 columns]\n('Male', 'Poor')\n Gender Economic Status Height Weight Age Income\n1 Male Poor 154.412583 52.239964 35.396019 21970.535592\n14 Male Poor 173.260859 72.702515 25.811203 17413.409283\n15 Male Poor 115.024189 73.727331 39.868965 15334.208241\n22 Male Poor 100.722833 65.637439 27.092509 18480.173251\n23 Male Poor 132.298478 83.588187 15.809467 22982.947764\n.. ... ... ... ... ... 
...\n486 Male Poor 182.334198 42.024611 25.281015 22098.558302\n489 Male Poor 106.345875 74.782662 21.799270 18459.299217\n490 Male Poor 131.542608 79.531191 37.520920 11928.574296\n495 Male Poor 191.736704 81.435678 29.239686 9551.920614\n496 Male Poor 158.329829 63.806982 29.048178 16518.792538\n\n[75 rows x 6 columns]\n('Male', 'Rich')\n Gender Economic Status Height Weight Age Income\n2 Male Rich 150.241907 68.440701 5.672197 18955.983950\n4 Male Rich 140.166396 66.890672 31.388991 13194.729458\n5 Male Rich 216.145735 87.137793 22.471955 22514.029957\n6 Male Rich 170.904528 68.252848 35.244927 19181.003451\n8 Male Rich 167.110189 30.422227 20.960454 12665.029002\n.. ... ... ... ... ... ...\n480 Male Rich 162.069689 76.523121 29.436631 21234.180281\n485 Male Rich 185.108882 53.033149 46.929687 17028.194434\n487 Male Rich 167.114234 20.092421 34.120205 13984.237509\n491 Male Rich 152.007980 54.189376 34.807762 17882.817899\n492 Male Rich 173.423295 44.023754 43.778252 19508.450080\n\n[86 rows x 6 columns]\n" ] ], [ [ "# Operaciones sobre datos agrupados", "_____no_output_____" ] ], [ [ "double_group.sum()", "_____no_output_____" ], [ "double_group.mean()", "_____no_output_____" ], [ "double_group.size()", "_____no_output_____" ], [ "double_group.describe()", "_____no_output_____" ], [ "grouped_income=double_group['Income']", "_____no_output_____" ], [ "grouped_income.describe()", "_____no_output_____" ], [ "double_group.aggregate(\n {\n \"Income\":np.sum,\n 'Age': np.mean,\n 'Height': np.std\n }\n)", "_____no_output_____" ], [ "double_group.aggregate(\n {\n 'Age':np.mean,\n 'Height': lambda h: np.mean(h)/np.std(h)\n }\n)", "_____no_output_____" ], [ "double_group.aggregate([np.sum, np.mean, np.std])", "_____no_output_____" ], [ "double_group.aggregate([lambda x: np.mean(x)/np.std(x)])", "_____no_output_____" ] ], [ [ "# Filtrado de datos", "_____no_output_____" ] ], [ [ "double_group.sum()", "_____no_output_____" ], [ "double_group['Age'].filter(lambda x: 
x.sum()>2400)", "_____no_output_____" ] ], [ [ "# Transformacion de variables", "_____no_output_____" ] ], [ [ "zscore=lambda x: (x-x.mean())/x.std()", "_____no_output_____" ], [ "z_group=double_group.transform(zscore)", "_____no_output_____" ], [ "import matplotlib.pyplot as plt", "_____no_output_____" ], [ "plt.hist(z_group['Age'])", "_____no_output_____" ], [ "fill_na_mean=lambda x: x.fillna(x.mean())", "_____no_output_____" ], [ "double_group.transform(fill_na_mean)", "_____no_output_____" ] ], [ [ "## Operaciones diversas utiles", "_____no_output_____" ] ], [ [ "double_group.head(1)", "_____no_output_____" ], [ "double_group.tail(1)", "_____no_output_____" ], [ "double_group.nth(32)", "_____no_output_____" ], [ "data_sorted=data.sort_values(['Age', 'Income'])", "_____no_output_____" ], [ "data_sorted.head(10)", "_____no_output_____" ], [ "age_grouped=data_sorted.groupby('Gender')", "_____no_output_____" ], [ "age_grouped.head()", "_____no_output_____" ], [ "age_grouped.tail(1)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa5f8e35b87354e35236e10dd03766d2cba06fd
33,762
ipynb
Jupyter Notebook
samples/core/guide/autograph.ipynb
yzhliu/models
22e248cebd6a556594f5631f5213bf13fd6f5792
[ "Apache-2.0" ]
2
2020-08-08T09:58:34.000Z
2020-08-08T09:58:37.000Z
samples/core/guide/autograph.ipynb
dheera/models
edc769a3a1d5bc3e90eabebd1c589126e70f5cb3
[ "Apache-2.0" ]
null
null
null
samples/core/guide/autograph.ipynb
dheera/models
edc769a3a1d5bc3e90eabebd1c589126e70f5cb3
[ "Apache-2.0" ]
6
2020-06-17T01:18:30.000Z
2022-01-14T06:35:05.000Z
31.03125
606
0.487086
[ [ [ "##### Copyright 2018 The TensorFlow Authors.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");", "_____no_output_____" ] ], [ [ "#@title Licensed under the Apache License, Version 2.0 (the \"License\"); { display-mode: \"form\" }\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.", "_____no_output_____" ] ], [ [ "# AutoGraph: Easy control flow for graphs ", "_____no_output_____" ], [ "<table class=\"tfo-notebook-buttons\" align=\"left\">\n <td>\n <a target=\"_blank\" href=\"https://www.tensorflow.org/versions/master/guide/autograph\"><img src=\"https://www.tensorflow.org/images/tf_logo_32px.png\" />View on TensorFlow.org</a>\n </td>\n <td>\n <a target=\"_blank\" href=\"https://colab.research.google.com/github/tensorflow/models/blob/master/samples/core/guide/autograph.ipynb\"><img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" />Run in Google Colab</a>\n </td>\n <td>\n <a target=\"_blank\" href=\"https://github.com/tensorflow/models/blob/master/samples/core/guide/autograph.ipynb\"><img src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\" />View source on GitHub</a>\n </td>\n</table>", "_____no_output_____" ], [ "[AutoGraph](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/autograph/) helps you write complicated graph code using normal Python. Behind the scenes, AutoGraph automatically transforms your code into the equivalent [TensorFlow graph code](https://www.tensorflow.org/guide/graphs). 
AutoGraph already supports much of the Python language, and that coverage continues to grow. For a list of supported Python language features, see the [Autograph capabilities and limitations](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/autograph/LIMITATIONS.md).", "_____no_output_____" ], [ "## Setup\n\nTo use AutoGraph, install the latest version of TensorFlow:", "_____no_output_____" ] ], [ [ "! pip install -U tf-nightly", "_____no_output_____" ] ], [ [ "Import TensorFlow, AutoGraph, and any supporting modules:", "_____no_output_____" ] ], [ [ "from __future__ import division, print_function, absolute_import\n\nimport tensorflow as tf\nimport tensorflow.keras.layers as layers\nfrom tensorflow.contrib import autograph\n\n\nimport numpy as np\nimport matplotlib.pyplot as plt", "_____no_output_____" ] ], [ [ "We'll enable [eager execution](https://www.tensorflow.org/guide/eager) for demonstration purposes, but AutoGraph works in both eager and [graph execution](https://www.tensorflow.org/guide/graphs) environments:", "_____no_output_____" ] ], [ [ "tf.enable_eager_execution()", "_____no_output_____" ] ], [ [ "Note: AutoGraph converted code is designed to run during graph execution. When eager exectuon is enabled, use explicit graphs (as this example shows) or `tf.contrib.eager.defun`.", "_____no_output_____" ], [ "## Automatically convert Python control flow\n\nAutoGraph will convert much of the Python language into the equivalent TensorFlow graph building code. \n\nNote: In real applications batching is essential for performance. The best code to convert to AutoGraph is code where the control flow is decided at the _batch_ level. If making decisions at the individual _example_ level, you must index and batch the examples to maintain performance while applying the control flow logic. 
\n\nAutoGraph converts a function like:", "_____no_output_____" ] ], [ [ "def square_if_positive(x):\n if x > 0:\n x = x * x\n else:\n x = 0.0\n return x", "_____no_output_____" ] ], [ [ "To a function that uses graph building:", "_____no_output_____" ] ], [ [ "print(autograph.to_code(square_if_positive))", "_____no_output_____" ] ], [ [ "Code written for eager execution can run in a `tf.Graph` with the same results, but with the benfits of graph execution:", "_____no_output_____" ] ], [ [ "print('Eager results: %2.2f, %2.2f' % (square_if_positive(tf.constant(9.0)), \n square_if_positive(tf.constant(-9.0))))", "_____no_output_____" ] ], [ [ "Generate a graph-version and call it:", "_____no_output_____" ] ], [ [ "tf_square_if_positive = autograph.to_graph(square_if_positive)\n\nwith tf.Graph().as_default(): \n # The result works like a regular op: takes tensors in, returns tensors.\n # You can inspect the graph using tf.get_default_graph().as_graph_def()\n g_out1 = tf_square_if_positive(tf.constant( 9.0))\n g_out2 = tf_square_if_positive(tf.constant(-9.0))\n with tf.Session() as sess:\n print('Graph results: %2.2f, %2.2f\\n' % (sess.run(g_out1), sess.run(g_out2)))", "_____no_output_____" ] ], [ [ "AutoGraph supports common Python statements like `while`, `for`, `if`, `break`, and `return`, with support for nesting. 
Compare this function with the complicated graph verson displayed in the following code blocks:", "_____no_output_____" ] ], [ [ "# Continue in a loop\ndef sum_even(items):\n s = 0\n for c in items:\n if c % 2 > 0:\n continue\n s += c\n return s\n\nprint('Eager result: %d' % sum_even(tf.constant([10,12,15,20])))\n\ntf_sum_even = autograph.to_graph(sum_even)\n\nwith tf.Graph().as_default(), tf.Session() as sess:\n print('Graph result: %d\\n\\n' % sess.run(tf_sum_even(tf.constant([10,12,15,20]))))", "_____no_output_____" ], [ "print(autograph.to_code(sum_even))", "_____no_output_____" ] ], [ [ "## Decorator\n\nIf you don't need easy access to the original Python function, use the `convert` decorator:", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef fizzbuzz(i, n):\n while i < n:\n msg = ''\n if i % 3 == 0:\n msg += 'Fizz'\n if i % 5 == 0:\n msg += 'Buzz'\n if msg == '':\n msg = tf.as_string(i)\n print(msg)\n i += 1\n return i\n\nwith tf.Graph().as_default():\n final_i = fizzbuzz(tf.constant(10), tf.constant(16))\n # The result works like a regular op: takes tensors in, returns tensors.\n # You can inspect the graph using tf.get_default_graph().as_graph_def()\n with tf.Session() as sess:\n sess.run(final_i)\n\n", "_____no_output_____" ] ], [ [ "## Examples\n\nLet's demonstrate some useful Python language features.\n", "_____no_output_____" ], [ "### Assert\n\nAutoGraph automatically converts the Python `assert` statement into the equivalent `tf.Assert` code:", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef inverse(x):\n assert x != 0.0, 'Do not pass zero!'\n return 1.0 / x\n\nwith tf.Graph().as_default(), tf.Session() as sess:\n try:\n print(sess.run(inverse(tf.constant(0.0))))\n except tf.errors.InvalidArgumentError as e:\n print('Got error message:\\n %s' % e.message)", "_____no_output_____" ] ], [ [ "### Print\n\nUse the Python `print` function in-graph:", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef count(n):\n i=0\n while i < 
n:\n print(i)\n i += 1\n return n\n \nwith tf.Graph().as_default(), tf.Session() as sess:\n sess.run(count(tf.constant(5)))", "_____no_output_____" ] ], [ [ "### Lists\n\nAppend to lists in loops (tensor list ops are automatically created):", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef arange(n):\n z = []\n # We ask you to tell us the element dtype of the list\n autograph.set_element_type(z, tf.int32)\n \n for i in range(n):\n z.append(i)\n # when you're done with the list, stack it\n # (this is just like np.stack)\n return autograph.stack(z) \n\n\nwith tf.Graph().as_default(), tf.Session() as sess:\n sess.run(arange(tf.constant(10)))", "_____no_output_____" ] ], [ [ "### Nested control flow", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef nearest_odd_square(x):\n if x > 0:\n x = x * x\n if x % 2 == 0:\n x = x + 1\n return x\n\nwith tf.Graph().as_default(): \n with tf.Session() as sess:\n print(sess.run(nearest_odd_square(tf.constant(4))))\n print(sess.run(nearest_odd_square(tf.constant(5))))\n print(sess.run(nearest_odd_square(tf.constant(6))))", "_____no_output_____" ] ], [ [ "### While loop", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef square_until_stop(x, y):\n while x < y:\n x = x * x\n return x\n \nwith tf.Graph().as_default(): \n with tf.Session() as sess:\n print(sess.run(square_until_stop(tf.constant(4), tf.constant(100))))", "_____no_output_____" ] ], [ [ "### For loop", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef fizzbuzz_each(nums):\n\n result = []\n autograph.set_element_type(result, tf.string)\n\n for num in nums: \n result.append(fizzbuzz(num))\n \n return autograph.stack(result)\n \nwith tf.Graph().as_default(): \n with tf.Session() as sess:\n print(sess.run(fizzbuzz_each(tf.constant(np.arange(10)))))", "_____no_output_____" ] ], [ [ "### Break", "_____no_output_____" ] ], [ [ "@autograph.convert()\ndef argwhere_cumsum(x, threshold):\n current_sum = 0.0\n idx = 0\n for i in range(len(x)):\n idx = 
i\n if current_sum >= threshold:\n break\n current_sum += x[i]\n return idx\n\nN = 10\nwith tf.Graph().as_default(): \n with tf.Session() as sess:\n idx = argwhere_cumsum(tf.ones(N), tf.constant(float(N/2)))\n print(sess.run(idx))", "_____no_output_____" ] ], [ [ "## Interoperation with `tf.Keras`\n\nNow that you've seen the basics, let's build some model components with autograph.\n\nIt's relatively simple to integrate `autograph` with `tf.keras`. \n\n\n### Stateless functions\n\nFor stateless functions, like `collatz` shown below, the easiest way to include them in a keras model is to wrap them up as a layer uisng `tf.keras.layers.Lambda`.", "_____no_output_____" ] ], [ [ "import numpy as np\n\[email protected]()\ndef collatz(x):\n x=tf.reshape(x,())\n assert x>0\n n = tf.convert_to_tensor((0,)) \n while not tf.equal(x,1):\n n+=1\n if tf.equal(x%2, 0):\n x = x//2\n else:\n x = 3*x+1\n \n return n\n\nwith tf.Graph().as_default():\n model = tf.keras.Sequential([\n tf.keras.layers.Lambda(collatz, input_shape=(1,), output_shape=(), )\n ])\n \nresult = model.predict(np.array([6171])) #261\nresult", "_____no_output_____" ] ], [ [ "### Custom Layers and Models\n\n<!--TODO(markdaoust) link to full examples or these referenced models.-->\n\nThe easiest way to use AutoGraph with Keras layers and models is to `@autograph.convert()` the `call` method. See the [TensorFlow Keras guide](https://tensorflow.org/guide/keras#build_advanced_models) for details on how to build on these classes. 
\n\nHere is a simple example of the [stocastic network depth](https://arxiv.org/abs/1603.09382) technique :", "_____no_output_____" ] ], [ [ "# `K` is used to check if we're in train or test mode.\nimport tensorflow.keras.backend as K\n\nclass StocasticNetworkDepth(tf.keras.Sequential):\n def __init__(self, pfirst=1.0, plast=0.5, *args,**kwargs):\n self.pfirst = pfirst\n self.plast = plast\n super().__init__(*args,**kwargs)\n \n def build(self,input_shape):\n super().build(input_shape.as_list())\n self.depth = len(self.layers)\n self.plims = np.linspace(self.pfirst, self.plast, self.depth+1)[:-1]\n \n @autograph.convert()\n def call(self, inputs):\n training = tf.cast(K.learning_phase(), dtype=bool) \n if not training: \n count = self.depth\n return super(StocasticNetworkDepth, self).call(inputs), count\n \n p = tf.random_uniform((self.depth,))\n \n keeps = p<=self.plims\n x = inputs\n \n count = tf.reduce_sum(tf.cast(keeps, tf.int32))\n for i in range(self.depth):\n if keeps[i]:\n x = self.layers[i](x)\n \n # return both the final-layer output and the number of layers executed.\n return x, count", "_____no_output_____" ] ], [ [ "Let's try it on mnist-shaped data:", "_____no_output_____" ] ], [ [ "train_batch = np.random.randn(64, 28,28,1).astype(np.float32)", "_____no_output_____" ] ], [ [ "Build a simple stack of `conv` layers, in the stocastic depth model:", "_____no_output_____" ] ], [ [ "with tf.Graph().as_default() as g:\n model = StocasticNetworkDepth(\n pfirst=1.0, plast=0.5)\n\n for n in range(20):\n model.add(\n layers.Conv2D(filters=16, activation=tf.nn.relu,\n kernel_size=(3,3), padding='same'))\n\n model.build(tf.TensorShape((None, None, None,1)))\n \n init = tf.global_variables_initializer()", "_____no_output_____" ] ], [ [ "Now test it to ensure it behaves as expected in train and test modes:", "_____no_output_____" ] ], [ [ "# Use an explicit session here so we can set the train/test switch, and\n# inspect the layer count returned by `call`\nwith 
tf.Session(graph=g) as sess:\n init.run()\n \n for phase, name in enumerate(['test','train']):\n K.set_learning_phase(phase)\n result, count = model(tf.convert_to_tensor(train_batch, dtype=tf.float32))\n\n result1, count1 = sess.run((result, count))\n result2, count2 = sess.run((result, count))\n\n delta = (result1 - result2)\n print(name, \"sum abs delta: \", abs(delta).mean())\n print(\" layers 1st call: \", count1)\n print(\" layers 2nd call: \", count2)\n print()", "_____no_output_____" ] ], [ [ "## Advanced example: An in-graph training loop\n\nThe previous section showed that AutoGraph can be used inside Keras layers and models. Keras models can also be used in AutoGraph code.\n\nSince writing control flow in AutoGraph is easy, running a training loop in a TensorFlow graph should also be easy. \n\nThis example shows how to train a simple Keras model on MNIST with the entire training process—loading batches, calculating gradients, updating parameters, calculating validation accuracy, and repeating until convergence—is performed in-graph.", "_____no_output_____" ], [ "### Download data", "_____no_output_____" ] ], [ [ "(train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()", "_____no_output_____" ] ], [ [ "### Define the model", "_____no_output_____" ] ], [ [ "def mlp_model(input_shape):\n model = tf.keras.Sequential((\n tf.keras.layers.Flatten(),\n tf.keras.layers.Dense(100, activation='relu', input_shape=input_shape),\n tf.keras.layers.Dense(100, activation='relu'),\n tf.keras.layers.Dense(10, activation='softmax')))\n model.build()\n return model\n\n\ndef predict(m, x, y):\n y_p = m(x)\n losses = tf.keras.losses.categorical_crossentropy(y, y_p)\n l = tf.reduce_mean(losses)\n accuracies = tf.keras.metrics.categorical_accuracy(y, y_p)\n accuracy = tf.reduce_mean(accuracies)\n return l, accuracy\n\n\ndef fit(m, x, y, opt):\n l, accuracy = predict(m, x, y)\n # Autograph automatically adds the necessary 
`tf.control_dependencies` here.\n # (Without them nothing depends on `opt.minimize`, so it doesn't run.)\n # This makes it much more like eager-code.\n opt.minimize(l)\n return l, accuracy\n\n\ndef setup_mnist_data(is_training, batch_size):\n if is_training:\n ds = tf.data.Dataset.from_tensor_slices((train_images, train_labels))\n ds = ds.shuffle(batch_size * 10)\n else:\n ds = tf.data.Dataset.from_tensor_slices((test_images, test_labels))\n\n ds = ds.repeat()\n ds = ds.batch(batch_size)\n return ds\n\n\ndef get_next_batch(ds):\n itr = ds.make_one_shot_iterator()\n image, label = itr.get_next()\n x = tf.to_float(image)/255.0\n y = tf.one_hot(tf.squeeze(label), 10)\n return x, y ", "_____no_output_____" ] ], [ [ "### Define the training loop", "_____no_output_____" ] ], [ [ "# Use `recursive = True` to recursively convert functions called by this one.\[email protected](recursive=True)\ndef train(train_ds, test_ds, hp):\n m = mlp_model((28 * 28,))\n opt = tf.train.AdamOptimizer(hp.learning_rate)\n \n # We'd like to save our losses to a list. 
In order for AutoGraph\n # to convert these lists into their graph equivalent,\n # we need to specify the element type of the lists.\n train_losses = []\n autograph.set_element_type(train_losses, tf.float32)\n test_losses = []\n autograph.set_element_type(test_losses, tf.float32)\n train_accuracies = []\n autograph.set_element_type(train_accuracies, tf.float32)\n test_accuracies = []\n autograph.set_element_type(test_accuracies, tf.float32)\n \n # This entire training loop will be run in-graph.\n i = tf.constant(0)\n while i < hp.max_steps:\n train_x, train_y = get_next_batch(train_ds)\n test_x, test_y = get_next_batch(test_ds)\n\n step_train_loss, step_train_accuracy = fit(m, train_x, train_y, opt)\n step_test_loss, step_test_accuracy = predict(m, test_x, test_y)\n if i % (hp.max_steps // 10) == 0:\n print('Step', i, 'train loss:', step_train_loss, 'test loss:',\n step_test_loss, 'train accuracy:', step_train_accuracy,\n 'test accuracy:', step_test_accuracy)\n train_losses.append(step_train_loss)\n test_losses.append(step_test_loss)\n train_accuracies.append(step_train_accuracy)\n test_accuracies.append(step_test_accuracy)\n i += 1\n \n # We've recorded our loss values and accuracies \n # to a list in a graph with AutoGraph's help.\n # In order to return the values as a Tensor, \n # we need to stack them before returning them.\n return (autograph.stack(train_losses), autograph.stack(test_losses), \n autograph.stack(train_accuracies), autograph.stack(test_accuracies))", "_____no_output_____" ] ], [ [ "Now build the graph and run the training loop:", "_____no_output_____" ] ], [ [ "with tf.Graph().as_default() as g:\n hp = tf.contrib.training.HParams(\n learning_rate=0.005,\n max_steps=500,\n )\n train_ds = setup_mnist_data(True, 50)\n test_ds = setup_mnist_data(False, 1000)\n (train_losses, test_losses, train_accuracies,\n test_accuracies) = train(train_ds, test_ds, hp)\n\n init = tf.global_variables_initializer()\n \nwith tf.Session(graph=g) as sess:\n 
sess.run(init)\n (train_losses, test_losses, train_accuracies,\n test_accuracies) = sess.run([train_losses, test_losses, train_accuracies,\n test_accuracies])\n \nplt.title('MNIST train/test losses')\nplt.plot(train_losses, label='train loss')\nplt.plot(test_losses, label='test loss')\nplt.legend()\nplt.xlabel('Training step')\nplt.ylabel('Loss')\nplt.show()\nplt.title('MNIST train/test accuracies')\nplt.plot(train_accuracies, label='train accuracy')\nplt.plot(test_accuracies, label='test accuracy')\nplt.legend(loc='lower right')\nplt.xlabel('Training step')\nplt.ylabel('Accuracy')\nplt.show()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa6253e22ddd5a5a26591b21d457fae61b2d057
96,789
ipynb
Jupyter Notebook
covid_xprize/RyanModelExponential.ipynb
nickwilders/covid-xprize
86c611e7da2dafb2a12f2baa2bff0fdd16a1fb22
[ "Apache-2.0" ]
null
null
null
covid_xprize/RyanModelExponential.ipynb
nickwilders/covid-xprize
86c611e7da2dafb2a12f2baa2bff0fdd16a1fb22
[ "Apache-2.0" ]
null
null
null
covid_xprize/RyanModelExponential.ipynb
nickwilders/covid-xprize
86c611e7da2dafb2a12f2baa2bff0fdd16a1fb22
[ "Apache-2.0" ]
null
null
null
37.956471
1,952
0.447117
[ [ [ "# Exponential Model\nThis model is not working! It attempts to fit an exponential curve to the data in order to predict the number of new cases. The example used here just takes in one feature and needs to be able to take an `*args` value and unpack it to be able to expand the function to take more arguments. Might be easier/more efficient to just transform the output, make the model and untransform it back. Yeo-johnson or power transform??? just trying to predict `x` for `tomorrow_cases = today_cases**x` instead of predicting `tomorrow cases directly`", "_____no_output_____" ] ], [ [ "import pickle\nimport os\nimport urllib.request\nfrom sklearn.linear_model import Lasso\nfrom sklearn.model_selection import train_test_split\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns", "_____no_output_____" ] ], [ [ "## Validation Data?", "_____no_output_____" ] ], [ [ "path_to_ips_file=\"validation/data/2020-09-30_historical_ip.csv\"\ninput_file = pd.read_csv(path_to_ips_file, low_memory=False)", "_____no_output_____" ], [ "# input_file[input_file['CountryName'] == 'United States']\ninput_file[input_file['RegionName'] == 'California']", "_____no_output_____" ] ], [ [ "## Importing the Training Data", "_____no_output_____" ] ], [ [ "# Main source for the training data\nDATA_URL = 'https://raw.githubusercontent.com/OxCGRT/covid-policy-tracker/master/data/OxCGRT_latest.csv'\n# Local files\ndata_path = 'examples/predictors/ryan_predictor/data'\nDATA_FILE = data_path + '/OxCGRT_latest.csv'\n\n\nif not os.path.exists(data_path):\n os.mkdir(data_path)\nurllib.request.urlretrieve(DATA_URL, DATA_FILE)", "_____no_output_____" ], [ "df = pd.read_csv(DATA_FILE, \n parse_dates=['Date'],\n encoding=\"ISO-8859-1\",\n dtype={\"RegionName\": str,\n \"RegionCode\": str},\n error_bad_lines=False)\n# df[cases_df['RegionName'] == 'California']", "_____no_output_____" ], [ "df.columns", "_____no_output_____" ], [ "HYPOTHETICAL_SUBMISSION_DATE 
= np.datetime64(\"2020-07-31\")\ndf = df[df.Date <= HYPOTHETICAL_SUBMISSION_DATE]", "_____no_output_____" ], [ "# Add RegionID column that combines CountryName and RegionName for easier manipulation of data\ndf['GeoID'] = df['CountryName'] + '__' + df['RegionName'].astype(str)", "_____no_output_____" ], [ "# Add new cases column\ndf['NewCases'] = df.groupby('GeoID').ConfirmedCases.diff().fillna(0)", "_____no_output_____" ], [ "# import sys\n# NewCases = []\n# for val in df['NewCases']:\n# if val != 0:\n# NewCases.append(val)\n# else:\n# NewCases.append(sys.float_info.epsilon)\n# sys.float_info.epsilon\n# df['NewCasesPercent'] = df.groupby('GeoID').NewCases.diff().fillna(0)/NewCases\ndf['NewCasesPercent'] = df.groupby('GeoID').NewCases.diff().fillna(0)/df['NewCases']\n# NewCasesList = df['NewCasesPercent'].tolist()\ndf = df.replace([np.inf, -np.inf, np.nan], 0)\nNewCasesList = df['NewCasesPercent'].tolist()\nNewCasesList", "_____no_output_____" ], [ "# Keep only columns of interest\nid_cols = ['CountryName',\n 'RegionName',\n 'GeoID',\n 'Date']\n#cases_col = ['NewCases', 'NewCasesPercent', 'ConfirmedCases']\ncases_col = ['NewCasesPercent']\nnpi_cols = ['C1_School closing',\n 'C2_Workplace closing',\n 'C3_Cancel public events',\n 'C4_Restrictions on gatherings',\n 'C5_Close public transport',\n 'C6_Stay at home requirements',\n 'C7_Restrictions on internal movement',\n 'C8_International travel controls',\n 'H1_Public information campaigns',\n 'H2_Testing policy',\n 'H3_Contact tracing',\n 'H6_Facial Coverings']\ndf = df[id_cols + cases_col + npi_cols]", "_____no_output_____" ], [ "# Fill any missing case values by interpolation and setting NaNs to 0\ndf.update(df.groupby('GeoID').NewCasesPercent.apply(\n lambda group: group.interpolate()).fillna(0))", "_____no_output_____" ], [ "# Fill any missing NPIs by assuming they are the same as previous day\nfor npi_col in npi_cols:\n df.update(df.groupby('GeoID')[npi_col].ffill().fillna(0))", "_____no_output_____" ], [ "df", 
"_____no_output_____" ] ], [ [ "## Making the Model", "_____no_output_____" ] ], [ [ "# Set number of past days to use to make predictions\nnb_lookback_days = 30\n\n# Create training data across all countries for predicting one day ahead\nX_cols = cases_col + npi_cols\ny_col = cases_col\nX_samples = []\ny_samples = []\ngeo_ids = df.GeoID.unique()\nfor g in geo_ids:\n gdf = df[df.GeoID == g]\n all_case_data = np.array(gdf[cases_col])\n all_npi_data = np.array(gdf[npi_cols])\n\n # Create one sample for each day where we have enough data\n # Each sample consists of cases and npis for previous nb_lookback_days\n nb_total_days = len(gdf)\n for d in range(nb_lookback_days, nb_total_days - 1):\n X_cases = all_case_data[d-nb_lookback_days:d]\n\n # Take negative of npis to support positive\n # weight constraint in Lasso.\n X_npis = -all_npi_data[d - nb_lookback_days:d]\n\n # Flatten all input data so it fits Lasso input format.\n X_sample = np.concatenate([X_cases.flatten(),\n X_npis.flatten()])\n y_sample = all_case_data[d + 1]\n X_samples.append(X_sample)\n y_samples.append(y_sample)\n\nX_samples = np.array(X_samples)\ny_samples = np.array(y_samples).flatten()", "_____no_output_____" ], [ "# Helpful function to compute mae\ndef mae(pred, true):\n return np.mean(np.abs(pred - true))", "_____no_output_____" ], [ "# Split data into train and test sets\nX_train, X_test, y_train, y_test = train_test_split(X_samples,\n y_samples,\n test_size=0.2,\n random_state=301)", "_____no_output_____" ], [ "# Create and train Lasso model.\n# Set positive=True to enforce assumption that cases are positively correlated\n# with future cases and npis are negatively correlated.\nmodel = Lasso(alpha=0.1,\n precompute=True,\n max_iter=10000,\n positive=True,\n selection='random')\n# Fit model\nmodel.fit(X_train, y_train)", "_____no_output_____" ], [ "# Evaluate model\ntrain_preds = model.predict(X_train)\ntrain_preds = np.maximum(train_preds, 0) # Don't predict negative cases\nprint('Train MAE:', 
mae(train_preds, y_train))\n\ntest_preds = model.predict(X_test)\ntest_preds = np.maximum(test_preds, 0) # Don't predict negative cases\nprint('Test MAE:', mae(test_preds, y_test))", "Train MAE: 0.4709144568587262\nTest MAE: 0.42944358872060706\n" ], [ "# Evaluate model\ntrain_preds = model.predict(X_train)\n#train_preds = np.maximum(train_preds, 0) # Don't predict negative cases\n# y_train\nprint('Train MAE:', mae(train_preds, y_train))\n\ntest_preds = model.predict(X_test)\n#test_preds = np.maximum(test_preds, 0) # Don't predict negative cases\nprint('Test MAE:', mae(test_preds, y_test))", "Train MAE: 0.5236120633926784\nTest MAE: 0.4847246155807235\n" ], [ "from sklearn.preprocessing import StandardScaler\n\nsc = StandardScaler()\nX_train = sc.fit_transform(X_train)\nX_test = sc.transform(X_test)\nfrom sklearn.ensemble import RandomForestRegressor\n\nregressor = RandomForestRegressor(n_estimators=20, random_state=0)\nregressor.fit(X_train, y_train)\ny_pred = regressor.predict(X_test)", "_____no_output_____" ], [ "# Evaluate model\ntrain_preds = model.predict(X_train)\ntrain_preds = np.maximum(train_preds, 0) # Don't predict negative cases\n# y_train\nprint('Train MAE:', mae(train_preds, y_train))\n\ntest_preds = model.predict(X_test)\ntest_preds = np.maximum(test_preds, 0) # Don't predict negative cases\nprint('Test MAE:', mae(test_preds, y_test))", "Train MAE: 0.4708342501539647\nTest MAE: 0.42940659821165933\n" ], [ "import numpy as np\nfrom scipy.optimize import curve_fit\n\ndef func_exp(x, a, b, c):\n #c = 0\n return a * np.exp(b * x) + c\n\ndef exponential_regression (x_data, y_data):\n popt, pcov = curve_fit(func_exp, x_data, y_data, p0 = (-1, 0.01, 1))\n print(popt)\n puntos = plt.plot(x_data, y_data, 'x', color='xkcd:maroon', label = \"data\")\n curva_regresion = plt.plot(x_data, func_exp(x_data, *popt), color='xkcd:teal', label = \"fit: {:.3f}, {:.3f}, {:.3f}\".format(*popt))\n return func_exp(x_data, *popt)\n\n# x_data = np.arange(0, 51) \n# y_data = 
np.array([0.001, 0.199, 0.394, 0.556, 0.797, 0.891, 1.171, 1.128, 1.437, \n# 1.525, 1.720, 1.703, 1.895, 2.003, 2.108, 2.408, 2.424,2.537, \n# 2.647, 2.740, 2.957, 2.58, 3.156, 3.051, 3.043, 3.353, 3.400, \n# 3.606, 3.659, 3.671, 3.750, 3.827, 3.902, 3.976, 4.048, 4.018, \n# 4.286, 4.353, 4.418, 4.382, 4.444, 4.485, 4.465, 4.600, 4.681, \n# 4.737, 4.792, 4.845, 4.909, 4.919, 5.100])\n# exponential_regression(x_data, y_data)\nexponential_regression(X_train, list(y_train))", "_____no_output_____" ], [ "# Inspect the learned feature coefficients for the model\n# to see what features it's paying attention to.\n\n# Give names to the features\nx_col_names = []\nfor d in range(-nb_lookback_days, 0):\n x_col_names.append('Day ' + str(d) + ' ' + cases_col[0])\nfor d in range(-nb_lookback_days, 1):\n for col_name in npi_cols:\n x_col_names.append('Day ' + str(d) + ' ' + col_name)\n\n# View non-zero coefficients\nfor (col, coeff) in zip(x_col_names, list(model.coef_)):\n if coeff != 0.:\n print(col, coeff)\nprint('Intercept', model.intercept_)", "_____no_output_____" ], [ "# Save model to file\n\nmodel_path = 'examples/predictors/ryan_predictor/model'\n\nif not os.path.exists(model_path):\n os.mkdir(model_path)\nwith open(model_path + '/model.pkl', 'wb') as model_file:\n pickle.dump(model, model_file)", "_____no_output_____" ] ], [ [ "## Evaluating the Model", "_____no_output_____" ] ], [ [ "# Reload the module to get the latest changes\nfrom examples.predictors.linear import predict\nfrom importlib import reload\nreload(predict)\nfrom examples.predictors.linear.predict import predict_df", "_____no_output_____" ], [ "%%time\npath_to_ips_file=\"validation/data/2020-09-30_historical_ip.csv\"\npreds_df = predict_df(\"2020-08-01\", \"2020-08-31\", path_to_ips_file, verbose=True)", "_____no_output_____" ], [ "# Check the predictions\npreds_df.head()", "_____no_output_____" ] ], [ [ "## Validation\n\nThis is how the predictor is going to be called during the competition.\n!!! 
PLEASE DO NOT CHANGE THE API !!!", "_____no_output_____" ] ], [ [ "!python examples/predictors/linear/ryan_predict.py -s 2020-08-01 -e 2020-08-04 -ip validation/data/2020-09-30_historical_ip.csv -o examples/predictors/ryan_predictor/predictions/2020-08-01_2020-08-04.csv", "python: can't open file 'examples/predictors/linear/ryan_predict.py': [Errno 2] No such file or directory\n" ], [ "!head predictions/2020-08-01_2020-08-04.csv", "head: predictions/2020-08-01_2020-08-04.csv: No such file or directory\n" ] ], [ [ "## Test Cases\nWe can generate a prediction file. Let's validate a few cases...", "_____no_output_____" ] ], [ [ "import sys\nfrom validation.predictor_validation import validate_submission\n\ndef validate(start_date, end_date, ip_file, output_file):\n # First, delete any potential old file\n try:\n os.remove(output_file)\n except OSError:\n pass\n \n # Then generate the prediction, calling the official API\n !python examples/predictors/linear/predict.py -s {start_date} -e {end_date} -ip {ip_file} -o {output_file}\n \n # And validate it\n errors = validate_submission(start_date, end_date, ip_file, output_file)\n if errors:\n for error in errors:\n print(error)\n else:\n print(\"All good!\")", "_____no_output_____" ] ], [ [ "### 4 days, no gap\n- All countries and regions\n- Official number of cases is known up to start_date\n- Intervention Plans are the official ones", "_____no_output_____" ] ], [ [ "validate(start_date=\"2020-08-01\",\n end_date=\"2020-08-04\",\n ip_file=\"validation/data/2020-09-30_historical_ip.csv\",\n output_file=\"examples/predictors/ryan_predictor/predictions/val_4_days.csv\")", "Generating predictions from 2020-08-01 to 2020-08-04...\nTraceback (most recent call last):\n File \"examples/predictors/linear/predict.py\", line 197, in <module>\n predict(args.start_date, args.end_date, args.ip_file, args.output_file)\n File \"examples/predictors/linear/predict.py\", line 52, in predict\n preds_df = predict_df(start_date, end_date, 
path_to_ips_file, verbose=False)\n File \"examples/predictors/linear/predict.py\", line 92, in predict_df\n hist_cases_df = pd.read_csv(DATA_FILE,\n File \"/Users/rlew/opt/miniconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/Users/rlew/opt/miniconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/Users/rlew/opt/miniconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 936, in __init__\n self._make_engine(self.engine)\n File \"/Users/rlew/opt/miniconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1168, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/Users/rlew/opt/miniconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1981, in __init__\n src = open(src, \"rb\")\nFileNotFoundError: [Errno 2] No such file or directory: '/Users/rlew/Documents/datascience/covid-xprize/covid_xprize/examples/predictors/linear/data/OxCGRT_latest.csv'\n" ] ], [ [ "### 1 month in the future\n- 2 countries only\n- there's a gap between date of last known number of cases and start_date\n- For future dates, Intervention Plans contains scenarios for which predictions are requested to answer the question: what will happen if we apply these plans?", "_____no_output_____" ] ], [ [ "%%time\nvalidate(start_date=\"2021-01-01\",\n end_date=\"2021-01-31\",\n ip_file=\"validation/data/future_ip.csv\",\n output_file=\"examples/predictors/linear/predictions/val_1_month_future.csv\")", "_____no_output_____" ] ], [ [ "### 180 days, from a future date, all countries and regions\n- Prediction start date is 1 week from now. (i.e. assuming submission date is 1 week from now) \n- Prediction end date is 6 months after start date. \n- Prediction is requested for all available countries and regions. 
\n- Intervention plan scenario: freeze last known intervention plans for each country and region. \n\nAs the number of cases is not known yet between today and start date, but the model relies on them, the model has to predict them in order to use them. \nThis test is the most demanding test. It should take less than 1 hour to generate the prediction file.", "_____no_output_____" ] ], [ [ "from datetime import datetime, timedelta\n\nstart_date = datetime.now() + timedelta(days=7)\nstart_date_str = start_date.strftime('%Y-%m-%d')\nend_date = start_date + timedelta(days=180)\nend_date_str = end_date.strftime('%Y-%m-%d')\nprint(f\"Start date: {start_date_str}\")\nprint(f\"End date: {end_date_str}\")", "_____no_output_____" ], [ "from validation.scenario_generator import get_raw_data, generate_scenario, NPI_COLUMNS\nDATA_FILE = 'examples/predictors/linear/data/OxCGRT_latest.csv'\nlatest_df = get_raw_data(DATA_FILE, latest=True)\nscenario_df = generate_scenario(start_date_str, end_date_str, latest_df, countries=None, scenario=\"Freeze\")\nscenario_file = \"examples/predictors/linear/predictions/180_days_future_scenario.csv\"\nscenario_df.to_csv(scenario_file, index=False)\nprint(f\"Saved scenario to {scenario_file}\")", "_____no_output_____" ] ], [ [ "### Check it", "_____no_output_____" ] ], [ [ "%%time\nvalidate(start_date=start_date_str,\n end_date=end_date_str,\n ip_file=scenario_file,\n output_file=\"examples/predictors/linear/predictions/val_6_month_future.csv\")", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ] ]
4aa62f0de856ac82f730aa3ec4daa03182db2653
215,508
ipynb
Jupyter Notebook
ex5-bias vs variance/bias_vs_variance.ipynb
yangwang/MachineLearning-AndrewNg
f4597bdf42d29f4bf79e27d93ef4e9eeac4dcc0d
[ "Apache-2.0" ]
null
null
null
ex5-bias vs variance/bias_vs_variance.ipynb
yangwang/MachineLearning-AndrewNg
f4597bdf42d29f4bf79e27d93ef4e9eeac4dcc0d
[ "Apache-2.0" ]
null
null
null
ex5-bias vs variance/bias_vs_variance.ipynb
yangwang/MachineLearning-AndrewNg
f4597bdf42d29f4bf79e27d93ef4e9eeac4dcc0d
[ "Apache-2.0" ]
null
null
null
278.794308
27,444
0.926597
[ [ [ "在本练习中,您将实现正则化的线性回归,并使用它来研究具有不同偏差-方差属性的模型", "_____no_output_____" ], [ "## 1 Regularized Linear Regression 正则线性回归\n\n在前半部分的练习中,你将实现正则化线性回归,以预测水库中的水位变化,从而预测大坝流出的水量。在下半部分中,您将通过一些调试学习算法的诊断,并检查偏差 v.s. 方差的影响。", "_____no_output_____" ], [ "### 1.1 Visualizing the dataset\n\n我们将从可视化数据集开始,其中包含水位变化的历史记录,x,以及从大坝流出的水量,y。\n\n这个数据集分为了三个部分:\n- training set 训练集:训练模型\n- cross validation set 交叉验证集:选择正则化参数\n- test set 测试集:评估性能,模型训练中不曾用过的样本", "_____no_output_____" ] ], [ [ "%matplotlib inline\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy.io import loadmat\nimport scipy.optimize as opt", "_____no_output_____" ] ], [ [ "读取数据", "_____no_output_____" ] ], [ [ "path = 'ex5data1.mat'\ndata = loadmat(path)\n#Training set\nX, y = data['X'], data['y']\n#Cross validation set\nXval, yval = data['Xval'], data['yval']\n#Test set\nXtest, ytest = data['Xtest'], data['ytest']\n#Insert a column of 1's to all of the X's, as usual\nX = np.insert(X, 0, 1, axis=1)\nXval = np.insert(Xval, 0, 1, axis=1)\nXtest = np.insert(Xtest, 0, 1, axis=1)\nprint('X={},y={}'.format(X.shape, y.shape))\nprint('Xval={},yval={}'.format(Xval.shape, yval.shape))\nprint('Xtest={},ytest={}'.format(Xtest.shape, ytest.shape))", "X=(12, 2),y=(12, 1)\nXval=(21, 2),yval=(21, 1)\nXtest=(21, 2),ytest=(21, 1)\n" ], [ "def plotData():\n \"\"\"瞧一瞧数据长啥样\"\"\"\n plt.figure(figsize=(8,5))\n plt.scatter(X[:,1:], y, c='r', marker='x')\n plt.xlabel('Change in water level (x)')\n plt.ylabel('Water flowing out of the dam (y)')\n plt.grid(True)\n \nplotData()", "_____no_output_____" ] ], [ [ "### 1.2 Regularized linear regression cost function\n\n![image.png](../img/5_1.png)", "_____no_output_____" ] ], [ [ "def costReg(theta, X, y, l):\n '''do not regularizethe theta0\n theta is a 1-d array with shape (n+1,)\n X is a matrix with shape (m, n+1)\n y is a matrix with shape (m, 1)\n '''\n cost = ((X @ theta - y.flatten()) ** 2).sum()\n regterm = theta[1:] @ theta[1:]\n return (cost + l * regterm) / (2 * len(X))", 
"_____no_output_____" ] ], [ [ " Using theta initialized at [1, 1], and lambda = 1, you should expect to see an output of 303.993192", "_____no_output_____" ] ], [ [ "theta = np.ones(X.shape[1])\nprint(costReg(theta, X, y, 1))", "303.9931922202643\n" ] ], [ [ "### 1.3 Regularized linear regression gradient\n\n![image.png](../img/5_2.png)", "_____no_output_____" ] ], [ [ "def gradientReg(theta, X, y, l):\n \"\"\"\n theta: 1-d array with shape (2,)\n X: 2-d array with shape (12, 2)\n y: 2-d array with shape (12, 1)\n l: lambda constant\n grad has same shape as theta (2,)\n \"\"\"\n grad = (X @ theta - y.flatten()) @ X\n regterm = l * theta\n regterm[0] = 0 # #don't regulate bias term\n return (grad + regterm) / len(X)\n\n# Using theta initialized at [1; 1] you should expect to see a \n# gradient of [-15.303016; 598.250744] (with lambda=1)\nprint(gradientReg(theta, X, y, 1))", "[-15.30301567 598.25074417]\n" ] ], [ [ "### 1.4 Fitting linear regression 拟合线性回归", "_____no_output_____" ] ], [ [ "def trainLinearReg(X, y, l):\n theta = np.zeros(X.shape[1])\n res = opt.minimize(fun=costReg, \n x0=theta, \n args=(X, y ,l), \n method='TNC', \n jac=gradientReg)\n return res.x", "_____no_output_____" ], [ "fit_theta = trainLinearReg(X, y, 0)\nplotData()\nplt.plot(X[:,1], X @ fit_theta)", "_____no_output_____" ] ], [ [ "这里我们把$\\lambda$ = 0,因为我们现在实现的线性回归只有两个参数,这么低的维度,正则化并没有用。\n\n从图中可以看到,拟合最好的这条直线告诉我们这个模型并不适合这个数据。\n\n在下一节中,您将实现一个函数来生成学习曲线,它可以帮助您调试学习算法,即使可视化数据不那么容易。", "_____no_output_____" ], [ "## 2 Bias-variance\n\n机器学习中一个重要的概念是偏差(bias)和方差(variance)的权衡。高偏差意味着欠拟合,高方差意味着过拟合。\n\n在这部分练习中,您将在学习曲线上绘制训练误差和验证误差,以诊断bias-variance问题。", "_____no_output_____" ], [ "### 2.1 Learning curves 学习曲线\n\n![image.png](../img/5_3.png)\n\n训练样本X从1开始逐渐增加,训练出不同的参数向量θ。接着通过交叉验证样本Xval计算验证误差。\n\n1. 使用训练集的子集来训练模型,得到不同的theta。\n\n2. 通过theta计算训练代价和交叉验证代价,切记此时**不要使用正则化**,将 $\\lambda = 0$。\n\n3. 
计算交叉验证代价时记得整个交叉验证集来计算,无需分为子集。", "_____no_output_____" ] ], [ [ "def plot_learning_curve(X, y, Xval, yval, l):\n \"\"\"画出学习曲线,即交叉验证误差和训练误差随样本数量的变化的变化\"\"\"\n xx = range(1, len(X) + 1) # at least has one example \n training_cost, cv_cost = [], []\n for i in xx:\n res = trainLinearReg(X[:i], y[:i], l)\n training_cost_i = costReg(res, X[:i], y[:i], 0)\n cv_cost_i = costReg(res, Xval, yval, 0)\n training_cost.append(training_cost_i)\n cv_cost.append(cv_cost_i)\n \n plt.figure(figsize=(8,5))\n plt.plot(xx, training_cost, label='training cost') \n plt.plot(xx, cv_cost, label='cv cost') \n plt.legend()\n plt.xlabel('Number of training examples')\n plt.ylabel('Error')\n plt.title('Learning curve for linear regression')\n plt.grid(True)", "_____no_output_____" ], [ "plot_learning_curve(X, y, Xval, yval, 0)", "_____no_output_____" ] ], [ [ "从图中看出来,随着样本数量的增加,训练误差和交叉验证误差都很高,这属于高偏差,欠拟合。", "_____no_output_____" ], [ "## 3 Polynomial regression 多项式回归\n\n我们的线性模型对于数据来说太简单了,导致了欠拟合(高偏差)。在这一部分的练习中,您将通过添加更多的特性来解决这个问题。\n\n使用多项式回归,假设函数形式如下:\n![image.png](../img/5_4.png)", "_____no_output_____" ], [ "### 3.1 Learning Polynomial Regression ", "_____no_output_____" ], [ "数据预处理\n\n1. X,Xval,Xtest都需要添加多项式特征,这里我们选择增加到6次方,因为若选8次方无法达到作业pdf上的效果图,这是因为scipy和octave版本的优化算法不同。\n\n2. 
不要忘了标准化。", "_____no_output_____" ] ], [ [ "def genPolyFeatures(X, power):\n \"\"\"添加多项式特征\n 每次在array的最后一列插入第二列的i+2次方(第一列为偏置)\n 从二次方开始开始插入(因为本身含有一列一次方)\n \"\"\"\n Xpoly = X.copy()\n for i in range(2, power + 1):\n Xpoly = np.insert(Xpoly, Xpoly.shape[1], np.power(Xpoly[:,1], i), axis=1)\n return Xpoly\n\ndef get_means_std(X):\n \"\"\"获取训练集的均值和误差,用来标准化所有数据。\"\"\"\n means = np.mean(X,axis=0)\n stds = np.std(X,axis=0,ddof=1) # ddof=1 means 样本标准差\n return means, stds\n\ndef featureNormalize(myX, means, stds):\n \"\"\"标准化\"\"\"\n X_norm = myX.copy()\n X_norm[:,1:] = X_norm[:,1:] - means[1:]\n X_norm[:,1:] = X_norm[:,1:] / stds[1:]\n return X_norm\n", "_____no_output_____" ] ], [ [ "关于归一化,所有数据集应该都用**训练集的均值和样本标准差**处理。切记。所以要将训练集的均值和样本标准差存储起来,对后面的数据进行处理。\n\n而且注意这里是**样本标准差而不是总体标准差**,使用np.std()时,将ddof=1则是样本标准差,默认=0是总体标准差。而pandas默认计算样本标准差。\n\n\n\n\n获取添加多项式特征以及 标准化之后的数据。", "_____no_output_____" ] ], [ [ "power = 6 # 扩展到x的6次方\n\ntrain_means, train_stds = get_means_std(genPolyFeatures(X,power))\nX_norm = featureNormalize(genPolyFeatures(X,power), train_means, train_stds)\nXval_norm = featureNormalize(genPolyFeatures(Xval,power), train_means, train_stds)\nXtest_norm = featureNormalize(genPolyFeatures(Xtest,power), train_means, train_stds)", "_____no_output_____" ], [ "def plot_fit(means, stds, l):\n theta = trainLinearReg(X_norm,y, l)\n x = np.linspace(-75,55,50)\n xmat = x.reshape(-1, 1) # Reshape your data using array.reshape(-1, 1) if your data has a single feature\n xmat = np.insert(xmat,0,1,axis=1)\n Xmat = genPolyFeatures(xmat, power)\n Xmat_norm = featureNormalize(Xmat, means, stds)\n \n plotData()\n plt.plot(x, Xmat_norm@theta,'b--')", "_____no_output_____" ], [ "plot_fit(train_means, train_stds, 0)\nplot_learning_curve(X_norm, y, Xval_norm, yval, 0)", "_____no_output_____" ] ], [ [ "### 3.2 Adjusting the regularization parameter", "_____no_output_____" ], [ "上图可以看到 $\\lambda$ = 0时,训练误差太小了,明显过拟合了。\n\n我们继续调整$\\lambda$ = 1 时:", "_____no_output_____" ] ], [ [ 
"plot_fit(train_means, train_stds, 1)\nplot_learning_curve(X_norm, y, Xval_norm, yval, 1)", "_____no_output_____" ] ], [ [ "我们继续调整$\\lambda$ = 100 时,很明显惩罚过多,欠拟合了", "_____no_output_____" ] ], [ [ "plot_fit(train_means, train_stds, 100)\nplot_learning_curve(X_norm, y, Xval_norm, yval, 100)", "_____no_output_____" ] ], [ [ "### 3.3 Selecting λ using a cross validation set", "_____no_output_____" ] ], [ [ "lambdas = [0., 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1., 3., 10.]\n# lambdas = np.linspace(0,5,20)\nerrors_train, errors_val = [], []\nfor l in lambdas:\n theta = trainLinearReg(X_norm, y, l)\n errors_train.append(costReg(theta,X_norm,y,0)) # 记得把lambda = 0\n errors_val.append(costReg(theta,Xval_norm,yval,0))\n \nplt.figure(figsize=(8,5))\nplt.plot(lambdas,errors_train,label='Train')\nplt.plot(lambdas,errors_val,label='Cross Validation')\nplt.legend()\nplt.xlabel('lambda')\nplt.ylabel('Error')\nplt.grid(True)", "_____no_output_____" ], [ "# 可以看到时交叉验证代价最小的是 lambda = 3\nlambdas[np.argmin(errors_val)]", "_____no_output_____" ] ], [ [ "### 3.4 Computing test set error\n\nIn our cross validation, we obtained a test error of 3.8599 for λ = 3.\n\n实际上我在上面调整了power=6来匹配作业里面的图,所以得不到3.8599。但是调整power=8时(同作业里一样),就可以得到上述数据。", "_____no_output_____" ] ], [ [ "theta = trainLinearReg(X_norm, y, 3)\nprint('test cost(l={}) = {}'.format(3, costReg(theta, Xtest_norm, ytest, 0)))\n# for l in lambdas:\n# theta = trainLinearReg(X_norm, y, l)\n# print('test cost(l={}) = {}'.format(l, costReg(theta, Xtest_norm, ytest, 0)))", "test cost(l=3) = 4.755271784804065\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ] ]
4aa633c9119b695ac02fc51857a41745fb96891b
63,224
ipynb
Jupyter Notebook
Tutorial-MaxwellCurvilinear.ipynb
goncalo-andrade/nrpytutorial
4fbcb51c936864b442daefd176bd6a5277c00116
[ "BSD-2-Clause" ]
1
2020-06-09T16:16:21.000Z
2020-06-09T16:16:21.000Z
Tutorial-MaxwellCurvilinear.ipynb
goncalo-andrade/nrpytutorial
4fbcb51c936864b442daefd176bd6a5277c00116
[ "BSD-2-Clause" ]
null
null
null
Tutorial-MaxwellCurvilinear.ipynb
goncalo-andrade/nrpytutorial
4fbcb51c936864b442daefd176bd6a5277c00116
[ "BSD-2-Clause" ]
null
null
null
80.540127
636
0.637954
[ [ [ "<script async src=\"https://www.googletagmanager.com/gtag/js?id=UA-59152712-8\"></script>\n<script>\n window.dataLayer = window.dataLayer || [];\n function gtag(){dataLayer.push(arguments);}\n gtag('js', new Date());\n\n gtag('config', 'UA-59152712-8');\n</script>\n\n# Generating C code for the right-hand sides of Maxwell's equations, in ***curvilinear*** coordinates, using a reference metric formalism\n\n## Author: Ian Ruchlin\n### Formatting improvements courtesy Brandon Clark\n\n[comment]: <> (Abstract: TODO)\n\n### The following formulations of Maxwell's equations, called System I and System II, are described in [Illustrating Stability Properties of Numerical Relativity in Electrodynamics](https://arxiv.org/abs/gr-qc/0201051) by Knapp et al.\n\n**Notebook Status:** <font color='red'><b> In progress </b></font>\n\n**Validation Notes:** This module has not yet undergone validation testing. Do ***not*** use it until after appropriate validation testing has been performed.\n\n## Introduction:\n[Maxwell's equations](https://en.wikipedia.org/wiki/Maxwell%27s_equations) are subject to the Gauss' law constraint\n$$\\mathcal{C} \\equiv \\hat{D}_{i} E^{i} - 4 \\pi \\rho = 0 \\; ,$$\nwhere $E^{i}$ is the electric vector field, $\\hat{D}_{i}$ is the [covariant derivative](https://en.wikipedia.org/wiki/Covariant_derivative) associated with the reference metric $\\hat{\\gamma}_{i j}$ (which is taken to represent flat space), and $\\rho$ is the electric charge density. We use $\\mathcal{C}$ as a measure of numerical error. Maxwell's equations are also required to satisfy $\\hat{D}_{i} B^{i} = 0$, where $B^{i}$ is the magnetic vector field. The magnetic constraint implies that the magnetic field can be expressed as\n$$B_{i} = \\epsilon_{i j k} \\hat{D}^{j} A^{k} \\; ,$$\nwhere $\\epsilon_{i j k}$ is the totally antisymmetric [Levi-Civita tensor](https://en.wikipedia.org/wiki/Levi-Civita_symbol) and $A^{i}$ is the vector potential field. 
Together with the scalar potential $\\psi$, the electric field can be expressed in terms of the potential fields as\n$$E_{i} = -\\hat{D}_{i} \\psi - \\partial_{t} A_{i} \\; .$$\nFor now, we work in vacuum, where the electric charge density and the electric current density vector both vanish ($\\rho = 0$ and $j_{i} = 0$).\n\nIn addition to the Gauss constraints, the electric and magnetic fields obey two independent [electromagnetic invariants](https://en.wikipedia.org/wiki/Classification_of_electromagnetic_fields#Invariants)\n\\begin{align}\n\\mathcal{P} &\\equiv B_{i} B^{i} - E_{i} E^{i} \\; , \\\\\n\\mathcal{Q} &\\equiv E_{i} B^{i} \\; .\n\\end{align}\nIn vacuum, these satisfy $\\mathcal{P} = \\mathcal{Q} = 0$.", "_____no_output_____" ], [ "<a id='toc'></a>\n\n# Table of Contents:\n$$\\label{toc}$$\n\nThis notebook is organized as follows\n\n1. [Step 1](#sys1): System I\n1. [Step 2](#sys2): System II\n1. [Step 3](#latex_pdf_output): Output this notebook to $\\LaTeX$-formatted PDF file", "_____no_output_____" ], [ "<a id='sys1'></a>\n\n# Step 1: System I \\[Back to [top](#toc)\\]\n$$\\label{sys1}$$\n\nIn terms of the above definitions, the evolution Maxwell's equations take the form\n\\begin{align}\n\\partial_{t} A_{i} &= -E_{i} - \\hat{D}_{i} \\psi \\; , \\\\\n\\partial_{t} E_{i} &= -\\hat{D}_{j} \\hat{D}^{j} A_{i} + \\hat{D}_{i} \\hat{D}_{j} A^{j}\\; , \\\\\n\\partial_{t} \\psi &= -\\hat{D}_{i} A^{i} \\; .\n\\end{align}\nNote that this coupled system contains mixed second derivatives in the second term on the right hand side of the $E^{i}$ evolution equation. We will revisit this fact when building System II.\n\nIt can be shown that the Gauss constraint satisfies the evolution equation\n$$\\partial_{t} \\mathcal{C} = 0 \\; .$$\nThis implies that any constraint violating numerical error remains fixed in place during the evolution. 
This becomes problematic when the violations grow large and spoil the physics of the simulation.", "_____no_output_____" ] ], [ [ "import NRPy_param_funcs as par # NRPy+: parameter interface\nimport indexedexp as ixp # NRPy+: Symbolic indexed expression (e.g., tensors, vectors, etc.) support\nimport grid as gri # NRPy+: Functions having to do with numerical grids\nimport finite_difference as fin # NRPy+: Finite difference C code generation module\nimport reference_metric as rfm # NRPy+: Reference metric support\nfrom outputC import lhrh # NRPy+: Core C code output module\n\npar.set_parval_from_str(\"reference_metric::CoordSystem\", \"Spherical\")\npar.set_parval_from_str(\"grid::DIM\", 3)\n\nrfm.reference_metric()\n\n# The name of this module (\"maxwell\") is given by __name__:\nthismodule = __name__\n\n# Step 0: Read the spatial dimension parameter as DIM.\nDIM = par.parval_from_str(\"grid::DIM\")\n\n# Step 1: Set the finite differencing order to 4.\npar.set_parval_from_str(\"finite_difference::FD_CENTDERIVS_ORDER\", 4)\n\n# Step 2: Register gridfunctions that are needed as input.\npsi = gri.register_gridfunctions(\"EVOL\", [\"psi\"])\n\n# Step 3a: Declare the rank-1 indexed expressions E_{i}, A_{i},\n# and \\partial_{i} \\psi. 
Derivative variables like these\n# must have an underscore in them, so the finite\n# difference module can parse the variable name properly.\nED = ixp.register_gridfunctions_for_single_rank1(\"EVOL\", \"ED\")\nAD = ixp.register_gridfunctions_for_single_rank1(\"EVOL\", \"AD\")\npsi_dD = ixp.declarerank1(\"psi_dD\")\n\n# Step 3b: Declare the rank-2 indexed expression \\partial_{j} A_{i},\n# which is not symmetric in its indices.\n# Derivative variables like these must have an underscore\n# in them, so the finite difference module can parse the\n# variable name properly.\nAD_dD = ixp.declarerank2(\"AD_dD\", \"nosym\")\n\n# Step 3c: Declare the rank-3 indexed expression \\partial_{jk} A_{i},\n# which is symmetric in the two {jk} indices.\nAD_dDD = ixp.declarerank3(\"AD_dDD\", \"sym12\")\n\n# Step 4: Calculate first and second covariant derivatives, and the\n# necessary contractions.\n# First covariant derivative\n# D_{j} A_{i} = A_{i,j} - \\Gamma^{k}_{ij} A_{k}\nAD_dHatD = ixp.zerorank2()\nfor i in range(DIM):\n for j in range(DIM):\n AD_dHatD[i][j] = AD_dD[i][j]\n for k in range(DIM):\n AD_dHatD[i][j] -= rfm.GammahatUDD[k][i][j] * AD[k]\n\n# Second covariant derivative\n# D_{k} D_{j} A_{i} = \\partial_{k} D_{j} A_{i} - \\Gamma^{l}_{jk} D_{l} A_{i}\n# - \\Gamma^{l}_{ik} D_{j} A_{l}\n# = A_{i,jk}\n# - \\Gamma^{l}_{ij,k} A_{l}\n# - \\Gamma^{l}_{ij} A_{l,k}\n# - \\Gamma^{l}_{jk} A_{i;\\hat{l}}\n# - \\Gamma^{l}_{ik} A_{l;\\hat{j}}\nAD_dHatDD = ixp.zerorank3()\nfor i in range(DIM):\n for j in range(DIM):\n for k in range(DIM):\n AD_dHatDD[i][j][k] = AD_dDD[i][j][k]\n for l in range(DIM):\n AD_dHatDD[i][j][k] += - rfm.GammahatUDDdD[l][i][j][k] * AD[l] \\\n - rfm.GammahatUDD[l][i][j] * AD_dD[l][k] \\\n - rfm.GammahatUDD[l][j][k] * AD_dHatD[i][l] \\\n - rfm.GammahatUDD[l][i][k] * AD_dHatD[l][j]\n\n# Covariant divergence\n# D_{i} A^{i} = ghat^{ij} D_{j} A_{i}\nDivA = 0\n# Gradient of covariant divergence\n# DivA_dD_{i} = ghat^{jk} A_{k;\\hat{j}\\hat{i}}\nDivA_dD = 
ixp.zerorank1()\n# Covariant Laplacian\n# LapAD_{i} = ghat^{jk} A_{i;\\hat{j}\\hat{k}}\nLapAD = ixp.zerorank1()\nfor i in range(DIM):\n for j in range(DIM):\n DivA += rfm.ghatUU[i][j] * AD_dHatD[i][j]\n for k in range(DIM):\n DivA_dD[i] += rfm.ghatUU[j][k] * AD_dHatDD[k][j][i]\n LapAD[i] += rfm.ghatUU[j][k] * AD_dHatDD[i][j][k]\n\n# Step 5: Define right-hand sides for the evolution.\nAD_rhs = ixp.zerorank1()\nED_rhs = ixp.zerorank1()\nfor i in range(DIM):\n AD_rhs[i] = -ED[i] - psi_dD[i]\n ED_rhs[i] = -LapAD[i] + DivA_dD[i]\npsi_rhs = -DivA\n\n# Step 6: Generate C code for System I Maxwell's evolution equations,\n# print output to the screen (standard out, or stdout).\nlhrh_list = []\nfor i in range(DIM):\n lhrh_list.append(lhrh(lhs=gri.gfaccess(\"rhs_gfs\", \"AD\" + str(i)), rhs=AD_rhs[i]))\n lhrh_list.append(lhrh(lhs=gri.gfaccess(\"rhs_gfs\", \"ED\" + str(i)), rhs=ED_rhs[i]))\nlhrh_list.append(lhrh(lhs=gri.gfaccess(\"rhs_gfs\", \"psi\"), rhs=psi_rhs))\n\nfin.FD_outputC(\"stdout\", lhrh_list)", "{\n /*\n * NRPy+ Finite Difference Code Generation, Step 1 of 2: Read from main memory and compute finite difference stencils:\n */\n /*\n * Original SymPy expressions:\n * \"[const double AD_dD00 = invdx0*(-2*AD0_i0m1_i1_i2/3 + AD0_i0m2_i1_i2/12 + 2*AD0_i0p1_i1_i2/3 - AD0_i0p2_i1_i2/12),\n * const double AD_dD01 = invdx1*(-2*AD0_i0_i1m1_i2/3 + AD0_i0_i1m2_i2/12 + 2*AD0_i0_i1p1_i2/3 - AD0_i0_i1p2_i2/12),\n * const double AD_dD02 = invdx2*(-2*AD0_i0_i1_i2m1/3 + AD0_i0_i1_i2m2/12 + 2*AD0_i0_i1_i2p1/3 - AD0_i0_i1_i2p2/12),\n * const double AD_dD10 = invdx0*(-2*AD1_i0m1_i1_i2/3 + AD1_i0m2_i1_i2/12 + 2*AD1_i0p1_i1_i2/3 - AD1_i0p2_i1_i2/12),\n * const double AD_dD11 = invdx1*(-2*AD1_i0_i1m1_i2/3 + AD1_i0_i1m2_i2/12 + 2*AD1_i0_i1p1_i2/3 - AD1_i0_i1p2_i2/12),\n * const double AD_dD12 = invdx2*(-2*AD1_i0_i1_i2m1/3 + AD1_i0_i1_i2m2/12 + 2*AD1_i0_i1_i2p1/3 - AD1_i0_i1_i2p2/12),\n * const double AD_dD20 = invdx0*(-2*AD2_i0m1_i1_i2/3 + AD2_i0m2_i1_i2/12 + 2*AD2_i0p1_i1_i2/3 - 
AD2_i0p2_i1_i2/12),\n * const double AD_dD21 = invdx1*(-2*AD2_i0_i1m1_i2/3 + AD2_i0_i1m2_i2/12 + 2*AD2_i0_i1p1_i2/3 - AD2_i0_i1p2_i2/12),\n * const double AD_dD22 = invdx2*(-2*AD2_i0_i1_i2m1/3 + AD2_i0_i1_i2m2/12 + 2*AD2_i0_i1_i2p1/3 - AD2_i0_i1_i2p2/12),\n * const double AD_dDD001 = invdx0*invdx1*(4*AD0_i0m1_i1m1_i2/9 - AD0_i0m1_i1m2_i2/18 - 4*AD0_i0m1_i1p1_i2/9 + AD0_i0m1_i1p2_i2/18 - AD0_i0m2_i1m1_i2/18 + AD0_i0m2_i1m2_i2/144 + AD0_i0m2_i1p1_i2/18 - AD0_i0m2_i1p2_i2/144 - 4*AD0_i0p1_i1m1_i2/9 + AD0_i0p1_i1m2_i2/18 + 4*AD0_i0p1_i1p1_i2/9 - AD0_i0p1_i1p2_i2/18 + AD0_i0p2_i1m1_i2/18 - AD0_i0p2_i1m2_i2/144 - AD0_i0p2_i1p1_i2/18 + AD0_i0p2_i1p2_i2/144),\n * const double AD_dDD002 = invdx0*invdx2*(4*AD0_i0m1_i1_i2m1/9 - AD0_i0m1_i1_i2m2/18 - 4*AD0_i0m1_i1_i2p1/9 + AD0_i0m1_i1_i2p2/18 - AD0_i0m2_i1_i2m1/18 + AD0_i0m2_i1_i2m2/144 + AD0_i0m2_i1_i2p1/18 - AD0_i0m2_i1_i2p2/144 - 4*AD0_i0p1_i1_i2m1/9 + AD0_i0p1_i1_i2m2/18 + 4*AD0_i0p1_i1_i2p1/9 - AD0_i0p1_i1_i2p2/18 + AD0_i0p2_i1_i2m1/18 - AD0_i0p2_i1_i2m2/144 - AD0_i0p2_i1_i2p1/18 + AD0_i0p2_i1_i2p2/144),\n * const double AD_dDD011 = invdx1**2*(-5*AD0/2 + 4*AD0_i0_i1m1_i2/3 - AD0_i0_i1m2_i2/12 + 4*AD0_i0_i1p1_i2/3 - AD0_i0_i1p2_i2/12),\n * const double AD_dDD022 = invdx2**2*(-5*AD0/2 + 4*AD0_i0_i1_i2m1/3 - AD0_i0_i1_i2m2/12 + 4*AD0_i0_i1_i2p1/3 - AD0_i0_i1_i2p2/12),\n * const double AD_dDD100 = invdx0**2*(-5*AD1/2 + 4*AD1_i0m1_i1_i2/3 - AD1_i0m2_i1_i2/12 + 4*AD1_i0p1_i1_i2/3 - AD1_i0p2_i1_i2/12),\n * const double AD_dDD101 = invdx0*invdx1*(4*AD1_i0m1_i1m1_i2/9 - AD1_i0m1_i1m2_i2/18 - 4*AD1_i0m1_i1p1_i2/9 + AD1_i0m1_i1p2_i2/18 - AD1_i0m2_i1m1_i2/18 + AD1_i0m2_i1m2_i2/144 + AD1_i0m2_i1p1_i2/18 - AD1_i0m2_i1p2_i2/144 - 4*AD1_i0p1_i1m1_i2/9 + AD1_i0p1_i1m2_i2/18 + 4*AD1_i0p1_i1p1_i2/9 - AD1_i0p1_i1p2_i2/18 + AD1_i0p2_i1m1_i2/18 - AD1_i0p2_i1m2_i2/144 - AD1_i0p2_i1p1_i2/18 + AD1_i0p2_i1p2_i2/144),\n * const double AD_dDD112 = invdx1*invdx2*(4*AD1_i0_i1m1_i2m1/9 - AD1_i0_i1m1_i2m2/18 - 4*AD1_i0_i1m1_i2p1/9 + AD1_i0_i1m1_i2p2/18 
- AD1_i0_i1m2_i2m1/18 + AD1_i0_i1m2_i2m2/144 + AD1_i0_i1m2_i2p1/18 - AD1_i0_i1m2_i2p2/144 - 4*AD1_i0_i1p1_i2m1/9 + AD1_i0_i1p1_i2m2/18 + 4*AD1_i0_i1p1_i2p1/9 - AD1_i0_i1p1_i2p2/18 + AD1_i0_i1p2_i2m1/18 - AD1_i0_i1p2_i2m2/144 - AD1_i0_i1p2_i2p1/18 + AD1_i0_i1p2_i2p2/144),\n * const double AD_dDD122 = invdx2**2*(-5*AD1/2 + 4*AD1_i0_i1_i2m1/3 - AD1_i0_i1_i2m2/12 + 4*AD1_i0_i1_i2p1/3 - AD1_i0_i1_i2p2/12),\n * const double AD_dDD200 = invdx0**2*(-5*AD2/2 + 4*AD2_i0m1_i1_i2/3 - AD2_i0m2_i1_i2/12 + 4*AD2_i0p1_i1_i2/3 - AD2_i0p2_i1_i2/12),\n * const double AD_dDD202 = invdx0*invdx2*(4*AD2_i0m1_i1_i2m1/9 - AD2_i0m1_i1_i2m2/18 - 4*AD2_i0m1_i1_i2p1/9 + AD2_i0m1_i1_i2p2/18 - AD2_i0m2_i1_i2m1/18 + AD2_i0m2_i1_i2m2/144 + AD2_i0m2_i1_i2p1/18 - AD2_i0m2_i1_i2p2/144 - 4*AD2_i0p1_i1_i2m1/9 + AD2_i0p1_i1_i2m2/18 + 4*AD2_i0p1_i1_i2p1/9 - AD2_i0p1_i1_i2p2/18 + AD2_i0p2_i1_i2m1/18 - AD2_i0p2_i1_i2m2/144 - AD2_i0p2_i1_i2p1/18 + AD2_i0p2_i1_i2p2/144),\n * const double AD_dDD211 = invdx1**2*(-5*AD2/2 + 4*AD2_i0_i1m1_i2/3 - AD2_i0_i1m2_i2/12 + 4*AD2_i0_i1p1_i2/3 - AD2_i0_i1p2_i2/12),\n * const double AD_dDD212 = invdx1*invdx2*(4*AD2_i0_i1m1_i2m1/9 - AD2_i0_i1m1_i2m2/18 - 4*AD2_i0_i1m1_i2p1/9 + AD2_i0_i1m1_i2p2/18 - AD2_i0_i1m2_i2m1/18 + AD2_i0_i1m2_i2m2/144 + AD2_i0_i1m2_i2p1/18 - AD2_i0_i1m2_i2p2/144 - 4*AD2_i0_i1p1_i2m1/9 + AD2_i0_i1p1_i2m2/18 + 4*AD2_i0_i1p1_i2p1/9 - AD2_i0_i1p1_i2p2/18 + AD2_i0_i1p2_i2m1/18 - AD2_i0_i1p2_i2m2/144 - AD2_i0_i1p2_i2p1/18 + AD2_i0_i1p2_i2p2/144),\n * const double psi_dD0 = invdx0*(-2*psi_i0m1_i1_i2/3 + psi_i0m2_i1_i2/12 + 2*psi_i0p1_i1_i2/3 - psi_i0p2_i1_i2/12),\n * const double psi_dD1 = invdx1*(-2*psi_i0_i1m1_i2/3 + psi_i0_i1m2_i2/12 + 2*psi_i0_i1p1_i2/3 - psi_i0_i1p2_i2/12),\n * const double psi_dD2 = invdx2*(-2*psi_i0_i1_i2m1/3 + psi_i0_i1_i2m2/12 + 2*psi_i0_i1_i2p1/3 - psi_i0_i1_i2p2/12)]\"\n */\n const double psi_i0_i1_i2m2 = in_gfs[IDX4(PSIGF, i0,i1,i2-2)];\n const double psi_i0_i1_i2m1 = in_gfs[IDX4(PSIGF, i0,i1,i2-1)];\n const double psi_i0_i1m2_i2 
= in_gfs[IDX4(PSIGF, i0,i1-2,i2)];\n const double psi_i0_i1m1_i2 = in_gfs[IDX4(PSIGF, i0,i1-1,i2)];\n const double psi_i0m2_i1_i2 = in_gfs[IDX4(PSIGF, i0-2,i1,i2)];\n const double psi_i0m1_i1_i2 = in_gfs[IDX4(PSIGF, i0-1,i1,i2)];\n const double psi_i0p1_i1_i2 = in_gfs[IDX4(PSIGF, i0+1,i1,i2)];\n const double psi_i0p2_i1_i2 = in_gfs[IDX4(PSIGF, i0+2,i1,i2)];\n const double psi_i0_i1p1_i2 = in_gfs[IDX4(PSIGF, i0,i1+1,i2)];\n const double psi_i0_i1p2_i2 = in_gfs[IDX4(PSIGF, i0,i1+2,i2)];\n const double psi_i0_i1_i2p1 = in_gfs[IDX4(PSIGF, i0,i1,i2+1)];\n const double psi_i0_i1_i2p2 = in_gfs[IDX4(PSIGF, i0,i1,i2+2)];\n const double ED0 = in_gfs[IDX4(ED0GF, i0,i1,i2)];\n const double ED1 = in_gfs[IDX4(ED1GF, i0,i1,i2)];\n const double ED2 = in_gfs[IDX4(ED2GF, i0,i1,i2)];\n const double AD0_i0m2_i1_i2m2 = in_gfs[IDX4(AD0GF, i0-2,i1,i2-2)];\n const double AD0_i0m1_i1_i2m2 = in_gfs[IDX4(AD0GF, i0-1,i1,i2-2)];\n const double AD0_i0_i1_i2m2 = in_gfs[IDX4(AD0GF, i0,i1,i2-2)];\n const double AD0_i0p1_i1_i2m2 = in_gfs[IDX4(AD0GF, i0+1,i1,i2-2)];\n const double AD0_i0p2_i1_i2m2 = in_gfs[IDX4(AD0GF, i0+2,i1,i2-2)];\n const double AD0_i0m2_i1_i2m1 = in_gfs[IDX4(AD0GF, i0-2,i1,i2-1)];\n const double AD0_i0m1_i1_i2m1 = in_gfs[IDX4(AD0GF, i0-1,i1,i2-1)];\n const double AD0_i0_i1_i2m1 = in_gfs[IDX4(AD0GF, i0,i1,i2-1)];\n const double AD0_i0p1_i1_i2m1 = in_gfs[IDX4(AD0GF, i0+1,i1,i2-1)];\n const double AD0_i0p2_i1_i2m1 = in_gfs[IDX4(AD0GF, i0+2,i1,i2-1)];\n const double AD0_i0m2_i1m2_i2 = in_gfs[IDX4(AD0GF, i0-2,i1-2,i2)];\n const double AD0_i0m1_i1m2_i2 = in_gfs[IDX4(AD0GF, i0-1,i1-2,i2)];\n const double AD0_i0_i1m2_i2 = in_gfs[IDX4(AD0GF, i0,i1-2,i2)];\n const double AD0_i0p1_i1m2_i2 = in_gfs[IDX4(AD0GF, i0+1,i1-2,i2)];\n const double AD0_i0p2_i1m2_i2 = in_gfs[IDX4(AD0GF, i0+2,i1-2,i2)];\n const double AD0_i0m2_i1m1_i2 = in_gfs[IDX4(AD0GF, i0-2,i1-1,i2)];\n const double AD0_i0m1_i1m1_i2 = in_gfs[IDX4(AD0GF, i0-1,i1-1,i2)];\n const double AD0_i0_i1m1_i2 = in_gfs[IDX4(AD0GF, 
i0,i1-1,i2)];\n const double AD0_i0p1_i1m1_i2 = in_gfs[IDX4(AD0GF, i0+1,i1-1,i2)];\n const double AD0_i0p2_i1m1_i2 = in_gfs[IDX4(AD0GF, i0+2,i1-1,i2)];\n const double AD0_i0m2_i1_i2 = in_gfs[IDX4(AD0GF, i0-2,i1,i2)];\n const double AD0_i0m1_i1_i2 = in_gfs[IDX4(AD0GF, i0-1,i1,i2)];\n const double AD0 = in_gfs[IDX4(AD0GF, i0,i1,i2)];\n const double AD0_i0p1_i1_i2 = in_gfs[IDX4(AD0GF, i0+1,i1,i2)];\n const double AD0_i0p2_i1_i2 = in_gfs[IDX4(AD0GF, i0+2,i1,i2)];\n const double AD0_i0m2_i1p1_i2 = in_gfs[IDX4(AD0GF, i0-2,i1+1,i2)];\n const double AD0_i0m1_i1p1_i2 = in_gfs[IDX4(AD0GF, i0-1,i1+1,i2)];\n const double AD0_i0_i1p1_i2 = in_gfs[IDX4(AD0GF, i0,i1+1,i2)];\n const double AD0_i0p1_i1p1_i2 = in_gfs[IDX4(AD0GF, i0+1,i1+1,i2)];\n const double AD0_i0p2_i1p1_i2 = in_gfs[IDX4(AD0GF, i0+2,i1+1,i2)];\n const double AD0_i0m2_i1p2_i2 = in_gfs[IDX4(AD0GF, i0-2,i1+2,i2)];\n const double AD0_i0m1_i1p2_i2 = in_gfs[IDX4(AD0GF, i0-1,i1+2,i2)];\n const double AD0_i0_i1p2_i2 = in_gfs[IDX4(AD0GF, i0,i1+2,i2)];\n const double AD0_i0p1_i1p2_i2 = in_gfs[IDX4(AD0GF, i0+1,i1+2,i2)];\n const double AD0_i0p2_i1p2_i2 = in_gfs[IDX4(AD0GF, i0+2,i1+2,i2)];\n const double AD0_i0m2_i1_i2p1 = in_gfs[IDX4(AD0GF, i0-2,i1,i2+1)];\n const double AD0_i0m1_i1_i2p1 = in_gfs[IDX4(AD0GF, i0-1,i1,i2+1)];\n const double AD0_i0_i1_i2p1 = in_gfs[IDX4(AD0GF, i0,i1,i2+1)];\n const double AD0_i0p1_i1_i2p1 = in_gfs[IDX4(AD0GF, i0+1,i1,i2+1)];\n const double AD0_i0p2_i1_i2p1 = in_gfs[IDX4(AD0GF, i0+2,i1,i2+1)];\n const double AD0_i0m2_i1_i2p2 = in_gfs[IDX4(AD0GF, i0-2,i1,i2+2)];\n const double AD0_i0m1_i1_i2p2 = in_gfs[IDX4(AD0GF, i0-1,i1,i2+2)];\n const double AD0_i0_i1_i2p2 = in_gfs[IDX4(AD0GF, i0,i1,i2+2)];\n const double AD0_i0p1_i1_i2p2 = in_gfs[IDX4(AD0GF, i0+1,i1,i2+2)];\n const double AD0_i0p2_i1_i2p2 = in_gfs[IDX4(AD0GF, i0+2,i1,i2+2)];\n const double AD1_i0_i1m2_i2m2 = in_gfs[IDX4(AD1GF, i0,i1-2,i2-2)];\n const double AD1_i0_i1m1_i2m2 = in_gfs[IDX4(AD1GF, i0,i1-1,i2-2)];\n const double AD1_i0_i1_i2m2 = 
in_gfs[IDX4(AD1GF, i0,i1,i2-2)];\n const double AD1_i0_i1p1_i2m2 = in_gfs[IDX4(AD1GF, i0,i1+1,i2-2)];\n const double AD1_i0_i1p2_i2m2 = in_gfs[IDX4(AD1GF, i0,i1+2,i2-2)];\n const double AD1_i0_i1m2_i2m1 = in_gfs[IDX4(AD1GF, i0,i1-2,i2-1)];\n const double AD1_i0_i1m1_i2m1 = in_gfs[IDX4(AD1GF, i0,i1-1,i2-1)];\n const double AD1_i0_i1_i2m1 = in_gfs[IDX4(AD1GF, i0,i1,i2-1)];\n const double AD1_i0_i1p1_i2m1 = in_gfs[IDX4(AD1GF, i0,i1+1,i2-1)];\n const double AD1_i0_i1p2_i2m1 = in_gfs[IDX4(AD1GF, i0,i1+2,i2-1)];\n const double AD1_i0m2_i1m2_i2 = in_gfs[IDX4(AD1GF, i0-2,i1-2,i2)];\n const double AD1_i0m1_i1m2_i2 = in_gfs[IDX4(AD1GF, i0-1,i1-2,i2)];\n const double AD1_i0_i1m2_i2 = in_gfs[IDX4(AD1GF, i0,i1-2,i2)];\n const double AD1_i0p1_i1m2_i2 = in_gfs[IDX4(AD1GF, i0+1,i1-2,i2)];\n const double AD1_i0p2_i1m2_i2 = in_gfs[IDX4(AD1GF, i0+2,i1-2,i2)];\n const double AD1_i0m2_i1m1_i2 = in_gfs[IDX4(AD1GF, i0-2,i1-1,i2)];\n const double AD1_i0m1_i1m1_i2 = in_gfs[IDX4(AD1GF, i0-1,i1-1,i2)];\n const double AD1_i0_i1m1_i2 = in_gfs[IDX4(AD1GF, i0,i1-1,i2)];\n const double AD1_i0p1_i1m1_i2 = in_gfs[IDX4(AD1GF, i0+1,i1-1,i2)];\n const double AD1_i0p2_i1m1_i2 = in_gfs[IDX4(AD1GF, i0+2,i1-1,i2)];\n const double AD1_i0m2_i1_i2 = in_gfs[IDX4(AD1GF, i0-2,i1,i2)];\n const double AD1_i0m1_i1_i2 = in_gfs[IDX4(AD1GF, i0-1,i1,i2)];\n const double AD1 = in_gfs[IDX4(AD1GF, i0,i1,i2)];\n const double AD1_i0p1_i1_i2 = in_gfs[IDX4(AD1GF, i0+1,i1,i2)];\n const double AD1_i0p2_i1_i2 = in_gfs[IDX4(AD1GF, i0+2,i1,i2)];\n const double AD1_i0m2_i1p1_i2 = in_gfs[IDX4(AD1GF, i0-2,i1+1,i2)];\n const double AD1_i0m1_i1p1_i2 = in_gfs[IDX4(AD1GF, i0-1,i1+1,i2)];\n const double AD1_i0_i1p1_i2 = in_gfs[IDX4(AD1GF, i0,i1+1,i2)];\n const double AD1_i0p1_i1p1_i2 = in_gfs[IDX4(AD1GF, i0+1,i1+1,i2)];\n const double AD1_i0p2_i1p1_i2 = in_gfs[IDX4(AD1GF, i0+2,i1+1,i2)];\n const double AD1_i0m2_i1p2_i2 = in_gfs[IDX4(AD1GF, i0-2,i1+2,i2)];\n const double AD1_i0m1_i1p2_i2 = in_gfs[IDX4(AD1GF, i0-1,i1+2,i2)];\n const double 
AD1_i0_i1p2_i2 = in_gfs[IDX4(AD1GF, i0,i1+2,i2)];\n const double AD1_i0p1_i1p2_i2 = in_gfs[IDX4(AD1GF, i0+1,i1+2,i2)];\n const double AD1_i0p2_i1p2_i2 = in_gfs[IDX4(AD1GF, i0+2,i1+2,i2)];\n const double AD1_i0_i1m2_i2p1 = in_gfs[IDX4(AD1GF, i0,i1-2,i2+1)];\n const double AD1_i0_i1m1_i2p1 = in_gfs[IDX4(AD1GF, i0,i1-1,i2+1)];\n const double AD1_i0_i1_i2p1 = in_gfs[IDX4(AD1GF, i0,i1,i2+1)];\n const double AD1_i0_i1p1_i2p1 = in_gfs[IDX4(AD1GF, i0,i1+1,i2+1)];\n const double AD1_i0_i1p2_i2p1 = in_gfs[IDX4(AD1GF, i0,i1+2,i2+1)];\n const double AD1_i0_i1m2_i2p2 = in_gfs[IDX4(AD1GF, i0,i1-2,i2+2)];\n const double AD1_i0_i1m1_i2p2 = in_gfs[IDX4(AD1GF, i0,i1-1,i2+2)];\n const double AD1_i0_i1_i2p2 = in_gfs[IDX4(AD1GF, i0,i1,i2+2)];\n const double AD1_i0_i1p1_i2p2 = in_gfs[IDX4(AD1GF, i0,i1+1,i2+2)];\n const double AD1_i0_i1p2_i2p2 = in_gfs[IDX4(AD1GF, i0,i1+2,i2+2)];\n const double AD2_i0_i1m2_i2m2 = in_gfs[IDX4(AD2GF, i0,i1-2,i2-2)];\n const double AD2_i0_i1m1_i2m2 = in_gfs[IDX4(AD2GF, i0,i1-1,i2-2)];\n const double AD2_i0m2_i1_i2m2 = in_gfs[IDX4(AD2GF, i0-2,i1,i2-2)];\n const double AD2_i0m1_i1_i2m2 = in_gfs[IDX4(AD2GF, i0-1,i1,i2-2)];\n const double AD2_i0_i1_i2m2 = in_gfs[IDX4(AD2GF, i0,i1,i2-2)];\n const double AD2_i0p1_i1_i2m2 = in_gfs[IDX4(AD2GF, i0+1,i1,i2-2)];\n const double AD2_i0p2_i1_i2m2 = in_gfs[IDX4(AD2GF, i0+2,i1,i2-2)];\n const double AD2_i0_i1p1_i2m2 = in_gfs[IDX4(AD2GF, i0,i1+1,i2-2)];\n const double AD2_i0_i1p2_i2m2 = in_gfs[IDX4(AD2GF, i0,i1+2,i2-2)];\n const double AD2_i0_i1m2_i2m1 = in_gfs[IDX4(AD2GF, i0,i1-2,i2-1)];\n const double AD2_i0_i1m1_i2m1 = in_gfs[IDX4(AD2GF, i0,i1-1,i2-1)];\n const double AD2_i0m2_i1_i2m1 = in_gfs[IDX4(AD2GF, i0-2,i1,i2-1)];\n const double AD2_i0m1_i1_i2m1 = in_gfs[IDX4(AD2GF, i0-1,i1,i2-1)];\n const double AD2_i0_i1_i2m1 = in_gfs[IDX4(AD2GF, i0,i1,i2-1)];\n const double AD2_i0p1_i1_i2m1 = in_gfs[IDX4(AD2GF, i0+1,i1,i2-1)];\n const double AD2_i0p2_i1_i2m1 = in_gfs[IDX4(AD2GF, i0+2,i1,i2-1)];\n const double AD2_i0_i1p1_i2m1 = 
in_gfs[IDX4(AD2GF, i0,i1+1,i2-1)];\n const double AD2_i0_i1p2_i2m1 = in_gfs[IDX4(AD2GF, i0,i1+2,i2-1)];\n const double AD2_i0_i1m2_i2 = in_gfs[IDX4(AD2GF, i0,i1-2,i2)];\n const double AD2_i0_i1m1_i2 = in_gfs[IDX4(AD2GF, i0,i1-1,i2)];\n const double AD2_i0m2_i1_i2 = in_gfs[IDX4(AD2GF, i0-2,i1,i2)];\n const double AD2_i0m1_i1_i2 = in_gfs[IDX4(AD2GF, i0-1,i1,i2)];\n const double AD2 = in_gfs[IDX4(AD2GF, i0,i1,i2)];\n const double AD2_i0p1_i1_i2 = in_gfs[IDX4(AD2GF, i0+1,i1,i2)];\n const double AD2_i0p2_i1_i2 = in_gfs[IDX4(AD2GF, i0+2,i1,i2)];\n const double AD2_i0_i1p1_i2 = in_gfs[IDX4(AD2GF, i0,i1+1,i2)];\n const double AD2_i0_i1p2_i2 = in_gfs[IDX4(AD2GF, i0,i1+2,i2)];\n const double AD2_i0_i1m2_i2p1 = in_gfs[IDX4(AD2GF, i0,i1-2,i2+1)];\n const double AD2_i0_i1m1_i2p1 = in_gfs[IDX4(AD2GF, i0,i1-1,i2+1)];\n const double AD2_i0m2_i1_i2p1 = in_gfs[IDX4(AD2GF, i0-2,i1,i2+1)];\n const double AD2_i0m1_i1_i2p1 = in_gfs[IDX4(AD2GF, i0-1,i1,i2+1)];\n const double AD2_i0_i1_i2p1 = in_gfs[IDX4(AD2GF, i0,i1,i2+1)];\n const double AD2_i0p1_i1_i2p1 = in_gfs[IDX4(AD2GF, i0+1,i1,i2+1)];\n const double AD2_i0p2_i1_i2p1 = in_gfs[IDX4(AD2GF, i0+2,i1,i2+1)];\n const double AD2_i0_i1p1_i2p1 = in_gfs[IDX4(AD2GF, i0,i1+1,i2+1)];\n const double AD2_i0_i1p2_i2p1 = in_gfs[IDX4(AD2GF, i0,i1+2,i2+1)];\n const double AD2_i0_i1m2_i2p2 = in_gfs[IDX4(AD2GF, i0,i1-2,i2+2)];\n const double AD2_i0_i1m1_i2p2 = in_gfs[IDX4(AD2GF, i0,i1-1,i2+2)];\n const double AD2_i0m2_i1_i2p2 = in_gfs[IDX4(AD2GF, i0-2,i1,i2+2)];\n const double AD2_i0m1_i1_i2p2 = in_gfs[IDX4(AD2GF, i0-1,i1,i2+2)];\n const double AD2_i0_i1_i2p2 = in_gfs[IDX4(AD2GF, i0,i1,i2+2)];\n const double AD2_i0p1_i1_i2p2 = in_gfs[IDX4(AD2GF, i0+1,i1,i2+2)];\n const double AD2_i0p2_i1_i2p2 = in_gfs[IDX4(AD2GF, i0+2,i1,i2+2)];\n const double AD2_i0_i1p1_i2p2 = in_gfs[IDX4(AD2GF, i0,i1+1,i2+2)];\n const double AD2_i0_i1p2_i2p2 = in_gfs[IDX4(AD2GF, i0,i1+2,i2+2)];\n const double FDPart1_Rational_2_3 = 2.0/3.0;\n const double FDPart1_Rational_1_12 = 
1.0/12.0;\n const double FDPart1_Rational_4_9 = 4.0/9.0;\n const double FDPart1_Rational_1_18 = 1.0/18.0;\n const double FDPart1_Rational_1_144 = 1.0/144.0;\n const double FDPart1_Rational_5_2 = 5.0/2.0;\n const double FDPart1_Rational_4_3 = 4.0/3.0;\n const double FDPart1_1 = -AD0_i0_i1_i2p2;\n const double FDPart1_9 = -AD0*FDPart1_Rational_5_2;\n const double FDPart1_12 = -AD1*FDPart1_Rational_5_2;\n const double FDPart1_14 = -AD2*FDPart1_Rational_5_2;\n const double AD_dD00 = invdx0*(FDPart1_Rational_1_12*(AD0_i0m2_i1_i2 - AD0_i0p2_i1_i2) + FDPart1_Rational_2_3*(-AD0_i0m1_i1_i2 + AD0_i0p1_i1_i2));\n const double AD_dD01 = invdx1*(FDPart1_Rational_1_12*(AD0_i0_i1m2_i2 - AD0_i0_i1p2_i2) + FDPart1_Rational_2_3*(-AD0_i0_i1m1_i2 + AD0_i0_i1p1_i2));\n const double AD_dD02 = invdx2*(FDPart1_Rational_1_12*(AD0_i0_i1_i2m2 + FDPart1_1) + FDPart1_Rational_2_3*(-AD0_i0_i1_i2m1 + AD0_i0_i1_i2p1));\n const double AD_dD10 = invdx0*(FDPart1_Rational_1_12*(AD1_i0m2_i1_i2 - AD1_i0p2_i1_i2) + FDPart1_Rational_2_3*(-AD1_i0m1_i1_i2 + AD1_i0p1_i1_i2));\n const double AD_dD11 = invdx1*(FDPart1_Rational_1_12*(AD1_i0_i1m2_i2 - AD1_i0_i1p2_i2) + FDPart1_Rational_2_3*(-AD1_i0_i1m1_i2 + AD1_i0_i1p1_i2));\n const double AD_dD12 = invdx2*(FDPart1_Rational_1_12*(AD1_i0_i1_i2m2 - AD1_i0_i1_i2p2) + FDPart1_Rational_2_3*(-AD1_i0_i1_i2m1 + AD1_i0_i1_i2p1));\n const double AD_dD20 = invdx0*(FDPart1_Rational_1_12*(AD2_i0m2_i1_i2 - AD2_i0p2_i1_i2) + FDPart1_Rational_2_3*(-AD2_i0m1_i1_i2 + AD2_i0p1_i1_i2));\n const double AD_dD21 = invdx1*(FDPart1_Rational_1_12*(AD2_i0_i1m2_i2 - AD2_i0_i1p2_i2) + FDPart1_Rational_2_3*(-AD2_i0_i1m1_i2 + AD2_i0_i1p1_i2));\n const double AD_dD22 = invdx2*(FDPart1_Rational_1_12*(AD2_i0_i1_i2m2 - AD2_i0_i1_i2p2) + FDPart1_Rational_2_3*(-AD2_i0_i1_i2m1 + AD2_i0_i1_i2p1));\n const double AD_dDD001 = invdx0*invdx1*(FDPart1_Rational_1_144*(AD0_i0m2_i1m2_i2 - AD0_i0m2_i1p2_i2 - AD0_i0p2_i1m2_i2 + AD0_i0p2_i1p2_i2) + FDPart1_Rational_1_18*(-AD0_i0m1_i1m2_i2 + AD0_i0m1_i1p2_i2 - 
AD0_i0m2_i1m1_i2 + AD0_i0m2_i1p1_i2 + AD0_i0p1_i1m2_i2 - AD0_i0p1_i1p2_i2 + AD0_i0p2_i1m1_i2 - AD0_i0p2_i1p1_i2) + FDPart1_Rational_4_9*(AD0_i0m1_i1m1_i2 - AD0_i0m1_i1p1_i2 - AD0_i0p1_i1m1_i2 + AD0_i0p1_i1p1_i2));\n const double AD_dDD002 = invdx0*invdx2*(FDPart1_Rational_1_144*(AD0_i0m2_i1_i2m2 - AD0_i0m2_i1_i2p2 - AD0_i0p2_i1_i2m2 + AD0_i0p2_i1_i2p2) + FDPart1_Rational_1_18*(-AD0_i0m1_i1_i2m2 + AD0_i0m1_i1_i2p2 - AD0_i0m2_i1_i2m1 + AD0_i0m2_i1_i2p1 + AD0_i0p1_i1_i2m2 - AD0_i0p1_i1_i2p2 + AD0_i0p2_i1_i2m1 - AD0_i0p2_i1_i2p1) + FDPart1_Rational_4_9*(AD0_i0m1_i1_i2m1 - AD0_i0m1_i1_i2p1 - AD0_i0p1_i1_i2m1 + AD0_i0p1_i1_i2p1));\n const double AD_dDD011 = ((invdx1)*(invdx1))*(FDPart1_9 + FDPart1_Rational_1_12*(-AD0_i0_i1m2_i2 - AD0_i0_i1p2_i2) + FDPart1_Rational_4_3*(AD0_i0_i1m1_i2 + AD0_i0_i1p1_i2));\n const double AD_dDD022 = ((invdx2)*(invdx2))*(FDPart1_9 + FDPart1_Rational_1_12*(-AD0_i0_i1_i2m2 + FDPart1_1) + FDPart1_Rational_4_3*(AD0_i0_i1_i2m1 + AD0_i0_i1_i2p1));\n const double AD_dDD100 = ((invdx0)*(invdx0))*(FDPart1_12 + FDPart1_Rational_1_12*(-AD1_i0m2_i1_i2 - AD1_i0p2_i1_i2) + FDPart1_Rational_4_3*(AD1_i0m1_i1_i2 + AD1_i0p1_i1_i2));\n const double AD_dDD101 = invdx0*invdx1*(FDPart1_Rational_1_144*(AD1_i0m2_i1m2_i2 - AD1_i0m2_i1p2_i2 - AD1_i0p2_i1m2_i2 + AD1_i0p2_i1p2_i2) + FDPart1_Rational_1_18*(-AD1_i0m1_i1m2_i2 + AD1_i0m1_i1p2_i2 - AD1_i0m2_i1m1_i2 + AD1_i0m2_i1p1_i2 + AD1_i0p1_i1m2_i2 - AD1_i0p1_i1p2_i2 + AD1_i0p2_i1m1_i2 - AD1_i0p2_i1p1_i2) + FDPart1_Rational_4_9*(AD1_i0m1_i1m1_i2 - AD1_i0m1_i1p1_i2 - AD1_i0p1_i1m1_i2 + AD1_i0p1_i1p1_i2));\n const double AD_dDD112 = invdx1*invdx2*(FDPart1_Rational_1_144*(AD1_i0_i1m2_i2m2 - AD1_i0_i1m2_i2p2 - AD1_i0_i1p2_i2m2 + AD1_i0_i1p2_i2p2) + FDPart1_Rational_1_18*(-AD1_i0_i1m1_i2m2 + AD1_i0_i1m1_i2p2 - AD1_i0_i1m2_i2m1 + AD1_i0_i1m2_i2p1 + AD1_i0_i1p1_i2m2 - AD1_i0_i1p1_i2p2 + AD1_i0_i1p2_i2m1 - AD1_i0_i1p2_i2p1) + FDPart1_Rational_4_9*(AD1_i0_i1m1_i2m1 - AD1_i0_i1m1_i2p1 - AD1_i0_i1p1_i2m1 + AD1_i0_i1p1_i2p1));\n 
const double AD_dDD122 = ((invdx2)*(invdx2))*(FDPart1_12 + FDPart1_Rational_1_12*(-AD1_i0_i1_i2m2 - AD1_i0_i1_i2p2) + FDPart1_Rational_4_3*(AD1_i0_i1_i2m1 + AD1_i0_i1_i2p1));\n const double AD_dDD200 = ((invdx0)*(invdx0))*(FDPart1_14 + FDPart1_Rational_1_12*(-AD2_i0m2_i1_i2 - AD2_i0p2_i1_i2) + FDPart1_Rational_4_3*(AD2_i0m1_i1_i2 + AD2_i0p1_i1_i2));\n const double AD_dDD202 = invdx0*invdx2*(FDPart1_Rational_1_144*(AD2_i0m2_i1_i2m2 - AD2_i0m2_i1_i2p2 - AD2_i0p2_i1_i2m2 + AD2_i0p2_i1_i2p2) + FDPart1_Rational_1_18*(-AD2_i0m1_i1_i2m2 + AD2_i0m1_i1_i2p2 - AD2_i0m2_i1_i2m1 + AD2_i0m2_i1_i2p1 + AD2_i0p1_i1_i2m2 - AD2_i0p1_i1_i2p2 + AD2_i0p2_i1_i2m1 - AD2_i0p2_i1_i2p1) + FDPart1_Rational_4_9*(AD2_i0m1_i1_i2m1 - AD2_i0m1_i1_i2p1 - AD2_i0p1_i1_i2m1 + AD2_i0p1_i1_i2p1));\n const double AD_dDD211 = ((invdx1)*(invdx1))*(FDPart1_14 + FDPart1_Rational_1_12*(-AD2_i0_i1m2_i2 - AD2_i0_i1p2_i2) + FDPart1_Rational_4_3*(AD2_i0_i1m1_i2 + AD2_i0_i1p1_i2));\n const double AD_dDD212 = invdx1*invdx2*(FDPart1_Rational_1_144*(AD2_i0_i1m2_i2m2 - AD2_i0_i1m2_i2p2 - AD2_i0_i1p2_i2m2 + AD2_i0_i1p2_i2p2) + FDPart1_Rational_1_18*(-AD2_i0_i1m1_i2m2 + AD2_i0_i1m1_i2p2 - AD2_i0_i1m2_i2m1 + AD2_i0_i1m2_i2p1 + AD2_i0_i1p1_i2m2 - AD2_i0_i1p1_i2p2 + AD2_i0_i1p2_i2m1 - AD2_i0_i1p2_i2p1) + FDPart1_Rational_4_9*(AD2_i0_i1m1_i2m1 - AD2_i0_i1m1_i2p1 - AD2_i0_i1p1_i2m1 + AD2_i0_i1p1_i2p1));\n const double psi_dD0 = invdx0*(FDPart1_Rational_1_12*(psi_i0m2_i1_i2 - psi_i0p2_i1_i2) + FDPart1_Rational_2_3*(-psi_i0m1_i1_i2 + psi_i0p1_i1_i2));\n const double psi_dD1 = invdx1*(FDPart1_Rational_1_12*(psi_i0_i1m2_i2 - psi_i0_i1p2_i2) + FDPart1_Rational_2_3*(-psi_i0_i1m1_i2 + psi_i0_i1p1_i2));\n const double psi_dD2 = invdx2*(FDPart1_Rational_1_12*(psi_i0_i1_i2m2 - psi_i0_i1_i2p2) + FDPart1_Rational_2_3*(-psi_i0_i1_i2m1 + psi_i0_i1_i2p1));\n /*\n * NRPy+ Finite Difference Code Generation, Step 2 of 2: Evaluate SymPy expressions and write to main memory:\n */\n /*\n * Original SymPy expressions:\n * \"[rhs_gfs[IDX4(AD0GF, 
i0, i1, i2)] = -ED0 - psi_dD0,\n * rhs_gfs[IDX4(ED0GF, i0, i1, i2)] = (AD0 + AD_dD00*xx0 + AD_dDD101 - 2*(AD0*xx0 + AD_dD11)/xx0)/xx0**2 - (AD_dD00*xx0 - AD_dD11/xx0 + AD_dDD011 - (AD0*xx0 + AD_dD11)/xx0)/xx0**2 + (AD0*sin(xx1)**2 + AD_dD00*xx0*sin(xx1)**2 + AD_dD10*sin(2*xx1)/2 + AD_dDD202 - 2*(AD0*xx0*sin(xx1)**2 + AD1*sin(2*xx1)/2 + AD_dD22)/xx0)/(xx0**2*sin(xx1)**2) - (AD_dD00*xx0*sin(xx1)**2 - AD_dD22/xx0 + AD_dDD022 + (-AD1/xx0 + AD_dD01)*sin(2*xx1)/2 - (AD0*xx0*sin(xx1)**2 + AD1*sin(2*xx1)/2 + AD_dD22)/xx0)/(xx0**2*sin(xx1)**2),\n * rhs_gfs[IDX4(AD1GF, i0, i1, i2)] = -ED1 - psi_dD1,\n * rhs_gfs[IDX4(ED1GF, i0, i1, i2)] = -AD1/xx0**2 + AD_dD10/xx0 + AD_dDD001 - AD_dDD100 - (-AD1/xx0 + AD_dD01)/xx0 - (-AD_dD22*sin(2*xx1)/(2*sin(xx1)**2) + AD_dDD122 + xx0*(-AD1/xx0 + AD_dD10)*sin(xx1)**2 + (AD0*xx0 + AD_dD11)*sin(2*xx1)/2 - (AD0*xx0*sin(xx1)**2 + AD1*sin(2*xx1)/2 + AD_dD22)*sin(2*xx1)/(2*sin(xx1)**2))/(xx0**2*sin(xx1)**2) + (2*AD0*xx0*sin(xx1)*cos(xx1) + AD1*cos(2*xx1) + AD_dD01*xx0*sin(xx1)**2 + AD_dD11*sin(2*xx1)/2 + AD_dDD212 - (AD0*xx0*sin(xx1)**2 + AD1*sin(2*xx1)/2 + AD_dD22)*sin(2*xx1)/sin(xx1)**2)/(xx0**2*sin(xx1)**2),\n * rhs_gfs[IDX4(AD2GF, i0, i1, i2)] = -ED2 - psi_dD2,\n * rhs_gfs[IDX4(ED2GF, i0, i1, i2)] = -AD2/xx0**2 + AD_dD20/xx0 + AD_dDD002 - AD_dDD200 - (-AD2/xx0 + AD_dD02)/xx0 + (AD_dD02*xx0 + AD_dDD112 - (-AD2*sin(2*xx1)/(2*sin(xx1)**2) + AD_dD12)*sin(2*xx1)/(2*sin(xx1)**2) - (-AD2*sin(2*xx1)/(2*sin(xx1)**2) + AD_dD21)*sin(2*xx1)/(2*sin(xx1)**2))/xx0**2 - (AD2*(-cos(2*xx1)/sin(xx1)**2 + sin(2*xx1)*cos(xx1)/sin(xx1)**3) - AD_dD21*sin(2*xx1)/(2*sin(xx1)**2) + AD_dDD211 + xx0*(-AD2/xx0 + AD_dD20) - (-AD2*sin(2*xx1)/(2*sin(xx1)**2) + AD_dD21)*sin(2*xx1)/(2*sin(xx1)**2))/xx0**2,\n * rhs_gfs[IDX4(PSIGF, i0, i1, i2)] = -AD_dD00 - (AD0*xx0 + AD_dD11)/xx0**2 - (AD0*xx0*sin(xx1)**2 + AD1*sin(2*xx1)/2 + AD_dD22)/(xx0**2*sin(xx1)**2)]\"\n */\n const double FDPart3_0 = (1.0/((xx0)*(xx0)));\n const double FDPart3_1 = AD_dD00*xx0;\n const double FDPart3_2 = 
(1.0/(xx0));\n const double FDPart3_3 = AD0*xx0;\n const double FDPart3_4 = AD_dD11 + FDPart3_3;\n const double FDPart3_6 = sin(xx1);\n const double FDPart3_7 = ((FDPart3_6)*(FDPart3_6));\n const double FDPart3_9 = sin(2*xx1);\n const double FDPart3_10 = (1.0/2.0)*FDPart3_9;\n const double FDPart3_12 = AD1*FDPart3_10 + AD_dD22 + FDPart3_3*FDPart3_7;\n const double FDPart3_14 = (1.0/(FDPart3_7));\n const double FDPart3_15 = FDPart3_0*FDPart3_14;\n const double FDPart3_16 = -AD1*FDPart3_2;\n const double FDPart3_18 = cos(2*xx1);\n const double FDPart3_20 = cos(xx1);\n const double FDPart3_22 = FDPart3_10*FDPart3_14;\n const double FDPart3_23 = -AD2*FDPart3_2;\n const double FDPart3_24 = -AD2*FDPart3_22;\n const double FDPart3_25 = -FDPart3_22*(AD_dD21 + FDPart3_24);\n rhs_gfs[IDX4(AD0GF, i0, i1, i2)] = -ED0 - psi_dD0;\n rhs_gfs[IDX4(ED0GF, i0, i1, i2)] = FDPart3_0*(AD0 + AD_dDD101 + FDPart3_1 - 2*FDPart3_2*FDPart3_4) - FDPart3_0*(-AD_dD11*FDPart3_2 + AD_dDD011 + FDPart3_1 - FDPart3_2*FDPart3_4) + FDPart3_15*(AD0*FDPart3_7 + AD_dD10*FDPart3_10 + AD_dDD202 + FDPart3_1*FDPart3_7 - 2*FDPart3_12*FDPart3_2) - FDPart3_15*(-AD_dD22*FDPart3_2 + AD_dDD022 + FDPart3_1*FDPart3_7 + FDPart3_10*(AD_dD01 + FDPart3_16) - FDPart3_12*FDPart3_2);\n rhs_gfs[IDX4(AD1GF, i0, i1, i2)] = -ED1 - psi_dD1;\n rhs_gfs[IDX4(ED1GF, i0, i1, i2)] = -AD1*FDPart3_0 + AD_dD10*FDPart3_2 + AD_dDD001 - AD_dDD100 - FDPart3_15*(-AD_dD22*FDPart3_22 + AD_dDD122 - FDPart3_10*FDPart3_12*FDPart3_14 + FDPart3_10*FDPart3_4 + FDPart3_7*xx0*(AD_dD10 + FDPart3_16)) + FDPart3_15*(AD1*FDPart3_18 + AD_dD01*FDPart3_7*xx0 + AD_dD11*FDPart3_10 + AD_dDD212 - FDPart3_12*FDPart3_14*FDPart3_9 + 2*FDPart3_20*FDPart3_3*FDPart3_6) - FDPart3_2*(AD_dD01 + FDPart3_16);\n rhs_gfs[IDX4(AD2GF, i0, i1, i2)] = -ED2 - psi_dD2;\n rhs_gfs[IDX4(ED2GF, i0, i1, i2)] = -AD2*FDPart3_0 + AD_dD20*FDPart3_2 + AD_dDD002 - AD_dDD200 + FDPart3_0*(AD_dD02*xx0 + AD_dDD112 - FDPart3_22*(AD_dD12 + FDPart3_24) + FDPart3_25) - 
FDPart3_0*(AD2*(-FDPart3_14*FDPart3_18 + FDPart3_20*FDPart3_9/((FDPart3_6)*(FDPart3_6)*(FDPart3_6))) - AD_dD21*FDPart3_22 + AD_dDD211 + FDPart3_25 + xx0*(AD_dD20 + FDPart3_23)) - FDPart3_2*(AD_dD02 + FDPart3_23);\n rhs_gfs[IDX4(PSIGF, i0, i1, i2)] = -AD_dD00 - FDPart3_0*FDPart3_4 - FDPart3_12*FDPart3_15;\n}\n\n" ] ], [ [ "<a id='sys2'></a>\n\n# Step 2: System II \\[Back to [top](#toc)\\]\n$$\\label{sys2}$$\n\nDefine the auxiliary variable\n$$\\Gamma \\equiv \\hat{D}_{i} A^{i} \\; .$$\nSubstituting this into Maxwell's equations yields the system\n\\begin{align}\n\\partial_{t} A_{i} &= -E_{i} - \\hat{D}_{i} \\psi \\; , \\\\\n\\partial_{t} E_{i} &= -\\hat{D}_{j} \\hat{D}^{j} A_{i} + \\hat{D}_{i} \\Gamma \\; , \\\\\n\\partial_{t} \\psi &= -\\Gamma \\; , \\\\\n\\partial_{t} \\Gamma &= -\\hat{D}_{i} \\hat{D}^{i} \\psi \\; .\n\\end{align}\n\n\n\nIt can be shown that the Gauss constraint now satisfies the wave equation\n$$\\partial_{t}^{2} \\mathcal{C} = \\hat{D}_{i} \\hat{D}^{i} \\mathcal{C} \\; .$$\nThus, any constraint violation introduced by numerical error propagates away at the speed of light. This property increases the stability of of the simulation, compared to System I above. 
A similar trick is used in the [BSSN formulation](Tutorial-BSSNCurvilinear.ipynb) of Einstein's equations.", "_____no_output_____" ] ], [ [ "# We inherit here all of the definitions from System I, above\n\n# Step 7a: Register the scalar auxiliary variable \\Gamma\nGamma = gri.register_gridfunctions(\"EVOL\", [\"Gamma\"])\n\n# Step 7b: Declare the ordinary gradient \\partial_{i} \\Gamma\nGamma_dD = ixp.declarerank1(\"Gamma_dD\")\n\n# Step 8a: Construct the second covariant derivative of the scalar \\psi\n# \\psi_{;\\hat{i}\\hat{j}} = \\psi_{,i;\\hat{j}}\n# = \\psi_{,ij} - \\Gamma^{k}_{ij} \\psi_{,k}\npsi_dDD = ixp.declarerank2(\"psi_dDD\", \"sym01\")\npsi_dHatDD = ixp.zerorank2()\nfor i in range(DIM):\n for j in range(DIM):\n psi_dHatDD[i][j] = psi_dDD[i][j]\n for k in range(DIM):\n psi_dHatDD[i][j] += - rfm.GammahatUDD[k][i][j] * psi_dD[k]\n\n# Step 8b: Construct the covariant Laplacian of \\psi\n# Lappsi = ghat^{ij} D_{j} D_{i} \\psi\nLappsi = 0\nfor i in range(DIM):\n for j in range(DIM):\n Lappsi += rfm.ghatUU[i][j] * psi_dHatDD[i][j]\n\n# Step 9: Define right-hand sides for the evolution.\nAD_rhs = ixp.zerorank1()\nED_rhs = ixp.zerorank1()\nfor i in range(DIM):\n AD_rhs[i] = -ED[i] - psi_dD[i]\n ED_rhs[i] = -LapAD[i] + Gamma_dD[i]\npsi_rhs = -Gamma\nGamma_rhs = -Lappsi\n\n# Step 10: Generate C code for System II Maxwell's evolution equations,\n# print output to the screen (standard out, or stdout).\nlhrh_list = []\nfor i in range(DIM):\n lhrh_list.append(lhrh(lhs=gri.gfaccess(\"rhs_gfs\", \"AD\" + str(i)), rhs=AD_rhs[i]))\n lhrh_list.append(lhrh(lhs=gri.gfaccess(\"rhs_gfs\", \"ED\" + str(i)), rhs=ED_rhs[i]))\nlhrh_list.append(lhrh(lhs=gri.gfaccess(\"rhs_gfs\", \"psi\"), rhs=psi_rhs))\nlhrh_list.append(lhrh(lhs=gri.gfaccess(\"rhs_gfs\", \"Gamma\"), rhs=Gamma_rhs))\n\nfin.FD_outputC(\"stdout\", lhrh_list)", "{\n /*\n * NRPy+ Finite Difference Code Generation, Step 1 of 2: Read from main memory and compute finite difference stencils:\n */\n /*\n * Original SymPy 
expressions:\n * \"[const double AD_dD00 = invdx0*(-2*AD0_i0m1_i1_i2/3 + AD0_i0m2_i1_i2/12 + 2*AD0_i0p1_i1_i2/3 - AD0_i0p2_i1_i2/12),\n * const double AD_dD01 = invdx1*(-2*AD0_i0_i1m1_i2/3 + AD0_i0_i1m2_i2/12 + 2*AD0_i0_i1p1_i2/3 - AD0_i0_i1p2_i2/12),\n * const double AD_dD02 = invdx2*(-2*AD0_i0_i1_i2m1/3 + AD0_i0_i1_i2m2/12 + 2*AD0_i0_i1_i2p1/3 - AD0_i0_i1_i2p2/12),\n * const double AD_dD10 = invdx0*(-2*AD1_i0m1_i1_i2/3 + AD1_i0m2_i1_i2/12 + 2*AD1_i0p1_i1_i2/3 - AD1_i0p2_i1_i2/12),\n * const double AD_dD11 = invdx1*(-2*AD1_i0_i1m1_i2/3 + AD1_i0_i1m2_i2/12 + 2*AD1_i0_i1p1_i2/3 - AD1_i0_i1p2_i2/12),\n * const double AD_dD12 = invdx2*(-2*AD1_i0_i1_i2m1/3 + AD1_i0_i1_i2m2/12 + 2*AD1_i0_i1_i2p1/3 - AD1_i0_i1_i2p2/12),\n * const double AD_dD20 = invdx0*(-2*AD2_i0m1_i1_i2/3 + AD2_i0m2_i1_i2/12 + 2*AD2_i0p1_i1_i2/3 - AD2_i0p2_i1_i2/12),\n * const double AD_dD21 = invdx1*(-2*AD2_i0_i1m1_i2/3 + AD2_i0_i1m2_i2/12 + 2*AD2_i0_i1p1_i2/3 - AD2_i0_i1p2_i2/12),\n * const double AD_dD22 = invdx2*(-2*AD2_i0_i1_i2m1/3 + AD2_i0_i1_i2m2/12 + 2*AD2_i0_i1_i2p1/3 - AD2_i0_i1_i2p2/12),\n * const double AD_dDD000 = invdx0**2*(-5*AD0/2 + 4*AD0_i0m1_i1_i2/3 - AD0_i0m2_i1_i2/12 + 4*AD0_i0p1_i1_i2/3 - AD0_i0p2_i1_i2/12),\n * const double AD_dDD011 = invdx1**2*(-5*AD0/2 + 4*AD0_i0_i1m1_i2/3 - AD0_i0_i1m2_i2/12 + 4*AD0_i0_i1p1_i2/3 - AD0_i0_i1p2_i2/12),\n * const double AD_dDD022 = invdx2**2*(-5*AD0/2 + 4*AD0_i0_i1_i2m1/3 - AD0_i0_i1_i2m2/12 + 4*AD0_i0_i1_i2p1/3 - AD0_i0_i1_i2p2/12),\n * const double AD_dDD100 = invdx0**2*(-5*AD1/2 + 4*AD1_i0m1_i1_i2/3 - AD1_i0m2_i1_i2/12 + 4*AD1_i0p1_i1_i2/3 - AD1_i0p2_i1_i2/12),\n * const double AD_dDD111 = invdx1**2*(-5*AD1/2 + 4*AD1_i0_i1m1_i2/3 - AD1_i0_i1m2_i2/12 + 4*AD1_i0_i1p1_i2/3 - AD1_i0_i1p2_i2/12),\n * const double AD_dDD122 = invdx2**2*(-5*AD1/2 + 4*AD1_i0_i1_i2m1/3 - AD1_i0_i1_i2m2/12 + 4*AD1_i0_i1_i2p1/3 - AD1_i0_i1_i2p2/12),\n * const double AD_dDD200 = invdx0**2*(-5*AD2/2 + 4*AD2_i0m1_i1_i2/3 - AD2_i0m2_i1_i2/12 + 4*AD2_i0p1_i1_i2/3 - 
AD2_i0p2_i1_i2/12),\n * const double AD_dDD211 = invdx1**2*(-5*AD2/2 + 4*AD2_i0_i1m1_i2/3 - AD2_i0_i1m2_i2/12 + 4*AD2_i0_i1p1_i2/3 - AD2_i0_i1p2_i2/12),\n * const double AD_dDD222 = invdx2**2*(-5*AD2/2 + 4*AD2_i0_i1_i2m1/3 - AD2_i0_i1_i2m2/12 + 4*AD2_i0_i1_i2p1/3 - AD2_i0_i1_i2p2/12),\n * const double Gamma_dD0 = invdx0*(-2*Gamma_i0m1_i1_i2/3 + Gamma_i0m2_i1_i2/12 + 2*Gamma_i0p1_i1_i2/3 - Gamma_i0p2_i1_i2/12),\n * const double Gamma_dD1 = invdx1*(-2*Gamma_i0_i1m1_i2/3 + Gamma_i0_i1m2_i2/12 + 2*Gamma_i0_i1p1_i2/3 - Gamma_i0_i1p2_i2/12),\n * const double Gamma_dD2 = invdx2*(-2*Gamma_i0_i1_i2m1/3 + Gamma_i0_i1_i2m2/12 + 2*Gamma_i0_i1_i2p1/3 - Gamma_i0_i1_i2p2/12),\n * const double psi_dD0 = invdx0*(-2*psi_i0m1_i1_i2/3 + psi_i0m2_i1_i2/12 + 2*psi_i0p1_i1_i2/3 - psi_i0p2_i1_i2/12),\n * const double psi_dD1 = invdx1*(-2*psi_i0_i1m1_i2/3 + psi_i0_i1m2_i2/12 + 2*psi_i0_i1p1_i2/3 - psi_i0_i1p2_i2/12),\n * const double psi_dD2 = invdx2*(-2*psi_i0_i1_i2m1/3 + psi_i0_i1_i2m2/12 + 2*psi_i0_i1_i2p1/3 - psi_i0_i1_i2p2/12),\n * const double psi_dDD00 = invdx0**2*(-5*psi/2 + 4*psi_i0m1_i1_i2/3 - psi_i0m2_i1_i2/12 + 4*psi_i0p1_i1_i2/3 - psi_i0p2_i1_i2/12),\n * const double psi_dDD11 = invdx1**2*(-5*psi/2 + 4*psi_i0_i1m1_i2/3 - psi_i0_i1m2_i2/12 + 4*psi_i0_i1p1_i2/3 - psi_i0_i1p2_i2/12),\n * const double psi_dDD22 = invdx2**2*(-5*psi/2 + 4*psi_i0_i1_i2m1/3 - psi_i0_i1_i2m2/12 + 4*psi_i0_i1_i2p1/3 - psi_i0_i1_i2p2/12)]\"\n */\n const double psi_i0_i1_i2m2 = in_gfs[IDX4(PSIGF, i0,i1,i2-2)];\n const double psi_i0_i1_i2m1 = in_gfs[IDX4(PSIGF, i0,i1,i2-1)];\n const double psi_i0_i1m2_i2 = in_gfs[IDX4(PSIGF, i0,i1-2,i2)];\n const double psi_i0_i1m1_i2 = in_gfs[IDX4(PSIGF, i0,i1-1,i2)];\n const double psi_i0m2_i1_i2 = in_gfs[IDX4(PSIGF, i0-2,i1,i2)];\n const double psi_i0m1_i1_i2 = in_gfs[IDX4(PSIGF, i0-1,i1,i2)];\n const double psi = in_gfs[IDX4(PSIGF, i0,i1,i2)];\n const double psi_i0p1_i1_i2 = in_gfs[IDX4(PSIGF, i0+1,i1,i2)];\n const double psi_i0p2_i1_i2 = in_gfs[IDX4(PSIGF, 
i0+2,i1,i2)];\n const double psi_i0_i1p1_i2 = in_gfs[IDX4(PSIGF, i0,i1+1,i2)];\n const double psi_i0_i1p2_i2 = in_gfs[IDX4(PSIGF, i0,i1+2,i2)];\n const double psi_i0_i1_i2p1 = in_gfs[IDX4(PSIGF, i0,i1,i2+1)];\n const double psi_i0_i1_i2p2 = in_gfs[IDX4(PSIGF, i0,i1,i2+2)];\n const double ED0 = in_gfs[IDX4(ED0GF, i0,i1,i2)];\n const double ED1 = in_gfs[IDX4(ED1GF, i0,i1,i2)];\n const double ED2 = in_gfs[IDX4(ED2GF, i0,i1,i2)];\n const double AD0_i0_i1_i2m2 = in_gfs[IDX4(AD0GF, i0,i1,i2-2)];\n const double AD0_i0_i1_i2m1 = in_gfs[IDX4(AD0GF, i0,i1,i2-1)];\n const double AD0_i0_i1m2_i2 = in_gfs[IDX4(AD0GF, i0,i1-2,i2)];\n const double AD0_i0_i1m1_i2 = in_gfs[IDX4(AD0GF, i0,i1-1,i2)];\n const double AD0_i0m2_i1_i2 = in_gfs[IDX4(AD0GF, i0-2,i1,i2)];\n const double AD0_i0m1_i1_i2 = in_gfs[IDX4(AD0GF, i0-1,i1,i2)];\n const double AD0 = in_gfs[IDX4(AD0GF, i0,i1,i2)];\n const double AD0_i0p1_i1_i2 = in_gfs[IDX4(AD0GF, i0+1,i1,i2)];\n const double AD0_i0p2_i1_i2 = in_gfs[IDX4(AD0GF, i0+2,i1,i2)];\n const double AD0_i0_i1p1_i2 = in_gfs[IDX4(AD0GF, i0,i1+1,i2)];\n const double AD0_i0_i1p2_i2 = in_gfs[IDX4(AD0GF, i0,i1+2,i2)];\n const double AD0_i0_i1_i2p1 = in_gfs[IDX4(AD0GF, i0,i1,i2+1)];\n const double AD0_i0_i1_i2p2 = in_gfs[IDX4(AD0GF, i0,i1,i2+2)];\n const double AD1_i0_i1_i2m2 = in_gfs[IDX4(AD1GF, i0,i1,i2-2)];\n const double AD1_i0_i1_i2m1 = in_gfs[IDX4(AD1GF, i0,i1,i2-1)];\n const double AD1_i0_i1m2_i2 = in_gfs[IDX4(AD1GF, i0,i1-2,i2)];\n const double AD1_i0_i1m1_i2 = in_gfs[IDX4(AD1GF, i0,i1-1,i2)];\n const double AD1_i0m2_i1_i2 = in_gfs[IDX4(AD1GF, i0-2,i1,i2)];\n const double AD1_i0m1_i1_i2 = in_gfs[IDX4(AD1GF, i0-1,i1,i2)];\n const double AD1 = in_gfs[IDX4(AD1GF, i0,i1,i2)];\n const double AD1_i0p1_i1_i2 = in_gfs[IDX4(AD1GF, i0+1,i1,i2)];\n const double AD1_i0p2_i1_i2 = in_gfs[IDX4(AD1GF, i0+2,i1,i2)];\n const double AD1_i0_i1p1_i2 = in_gfs[IDX4(AD1GF, i0,i1+1,i2)];\n const double AD1_i0_i1p2_i2 = in_gfs[IDX4(AD1GF, i0,i1+2,i2)];\n const double AD1_i0_i1_i2p1 = 
in_gfs[IDX4(AD1GF, i0,i1,i2+1)];\n const double AD1_i0_i1_i2p2 = in_gfs[IDX4(AD1GF, i0,i1,i2+2)];\n const double AD2_i0_i1_i2m2 = in_gfs[IDX4(AD2GF, i0,i1,i2-2)];\n const double AD2_i0_i1_i2m1 = in_gfs[IDX4(AD2GF, i0,i1,i2-1)];\n const double AD2_i0_i1m2_i2 = in_gfs[IDX4(AD2GF, i0,i1-2,i2)];\n const double AD2_i0_i1m1_i2 = in_gfs[IDX4(AD2GF, i0,i1-1,i2)];\n const double AD2_i0m2_i1_i2 = in_gfs[IDX4(AD2GF, i0-2,i1,i2)];\n const double AD2_i0m1_i1_i2 = in_gfs[IDX4(AD2GF, i0-1,i1,i2)];\n const double AD2 = in_gfs[IDX4(AD2GF, i0,i1,i2)];\n const double AD2_i0p1_i1_i2 = in_gfs[IDX4(AD2GF, i0+1,i1,i2)];\n const double AD2_i0p2_i1_i2 = in_gfs[IDX4(AD2GF, i0+2,i1,i2)];\n const double AD2_i0_i1p1_i2 = in_gfs[IDX4(AD2GF, i0,i1+1,i2)];\n const double AD2_i0_i1p2_i2 = in_gfs[IDX4(AD2GF, i0,i1+2,i2)];\n const double AD2_i0_i1_i2p1 = in_gfs[IDX4(AD2GF, i0,i1,i2+1)];\n const double AD2_i0_i1_i2p2 = in_gfs[IDX4(AD2GF, i0,i1,i2+2)];\n const double Gamma_i0_i1_i2m2 = in_gfs[IDX4(GAMMAGF, i0,i1,i2-2)];\n const double Gamma_i0_i1_i2m1 = in_gfs[IDX4(GAMMAGF, i0,i1,i2-1)];\n const double Gamma_i0_i1m2_i2 = in_gfs[IDX4(GAMMAGF, i0,i1-2,i2)];\n const double Gamma_i0_i1m1_i2 = in_gfs[IDX4(GAMMAGF, i0,i1-1,i2)];\n const double Gamma_i0m2_i1_i2 = in_gfs[IDX4(GAMMAGF, i0-2,i1,i2)];\n const double Gamma_i0m1_i1_i2 = in_gfs[IDX4(GAMMAGF, i0-1,i1,i2)];\n const double Gamma = in_gfs[IDX4(GAMMAGF, i0,i1,i2)];\n const double Gamma_i0p1_i1_i2 = in_gfs[IDX4(GAMMAGF, i0+1,i1,i2)];\n const double Gamma_i0p2_i1_i2 = in_gfs[IDX4(GAMMAGF, i0+2,i1,i2)];\n const double Gamma_i0_i1p1_i2 = in_gfs[IDX4(GAMMAGF, i0,i1+1,i2)];\n const double Gamma_i0_i1p2_i2 = in_gfs[IDX4(GAMMAGF, i0,i1+2,i2)];\n const double Gamma_i0_i1_i2p1 = in_gfs[IDX4(GAMMAGF, i0,i1,i2+1)];\n const double Gamma_i0_i1_i2p2 = in_gfs[IDX4(GAMMAGF, i0,i1,i2+2)];\n const double FDPart1_Rational_2_3 = 2.0/3.0;\n const double FDPart1_Rational_1_12 = 1.0/12.0;\n const double FDPart1_Rational_5_2 = 5.0/2.0;\n const double FDPart1_Rational_4_3 = 
4.0/3.0;\n const double FDPart1_1 = -AD0_i0_i1p2_i2;\n const double FDPart1_9 = ((invdx0)*(invdx0));\n const double FDPart1_10 = -AD0*FDPart1_Rational_5_2;\n const double FDPart1_11 = ((invdx1)*(invdx1));\n const double FDPart1_12 = ((invdx2)*(invdx2));\n const double FDPart1_13 = -AD1*FDPart1_Rational_5_2;\n const double FDPart1_14 = -AD2*FDPart1_Rational_5_2;\n const double FDPart1_18 = -FDPart1_Rational_5_2*psi;\n const double AD_dD00 = invdx0*(FDPart1_Rational_1_12*(AD0_i0m2_i1_i2 - AD0_i0p2_i1_i2) + FDPart1_Rational_2_3*(-AD0_i0m1_i1_i2 + AD0_i0p1_i1_i2));\n const double AD_dD01 = invdx1*(FDPart1_Rational_1_12*(AD0_i0_i1m2_i2 + FDPart1_1) + FDPart1_Rational_2_3*(-AD0_i0_i1m1_i2 + AD0_i0_i1p1_i2));\n const double AD_dD02 = invdx2*(FDPart1_Rational_1_12*(AD0_i0_i1_i2m2 - AD0_i0_i1_i2p2) + FDPart1_Rational_2_3*(-AD0_i0_i1_i2m1 + AD0_i0_i1_i2p1));\n const double AD_dD10 = invdx0*(FDPart1_Rational_1_12*(AD1_i0m2_i1_i2 - AD1_i0p2_i1_i2) + FDPart1_Rational_2_3*(-AD1_i0m1_i1_i2 + AD1_i0p1_i1_i2));\n const double AD_dD11 = invdx1*(FDPart1_Rational_1_12*(AD1_i0_i1m2_i2 - AD1_i0_i1p2_i2) + FDPart1_Rational_2_3*(-AD1_i0_i1m1_i2 + AD1_i0_i1p1_i2));\n const double AD_dD12 = invdx2*(FDPart1_Rational_1_12*(AD1_i0_i1_i2m2 - AD1_i0_i1_i2p2) + FDPart1_Rational_2_3*(-AD1_i0_i1_i2m1 + AD1_i0_i1_i2p1));\n const double AD_dD20 = invdx0*(FDPart1_Rational_1_12*(AD2_i0m2_i1_i2 - AD2_i0p2_i1_i2) + FDPart1_Rational_2_3*(-AD2_i0m1_i1_i2 + AD2_i0p1_i1_i2));\n const double AD_dD21 = invdx1*(FDPart1_Rational_1_12*(AD2_i0_i1m2_i2 - AD2_i0_i1p2_i2) + FDPart1_Rational_2_3*(-AD2_i0_i1m1_i2 + AD2_i0_i1p1_i2));\n const double AD_dD22 = invdx2*(FDPart1_Rational_1_12*(AD2_i0_i1_i2m2 - AD2_i0_i1_i2p2) + FDPart1_Rational_2_3*(-AD2_i0_i1_i2m1 + AD2_i0_i1_i2p1));\n const double AD_dDD000 = FDPart1_9*(FDPart1_10 + FDPart1_Rational_1_12*(-AD0_i0m2_i1_i2 - AD0_i0p2_i1_i2) + FDPart1_Rational_4_3*(AD0_i0m1_i1_i2 + AD0_i0p1_i1_i2));\n const double AD_dDD011 = FDPart1_11*(FDPart1_10 + 
FDPart1_Rational_1_12*(-AD0_i0_i1m2_i2 + FDPart1_1) + FDPart1_Rational_4_3*(AD0_i0_i1m1_i2 + AD0_i0_i1p1_i2));\n const double AD_dDD022 = FDPart1_12*(FDPart1_10 + FDPart1_Rational_1_12*(-AD0_i0_i1_i2m2 - AD0_i0_i1_i2p2) + FDPart1_Rational_4_3*(AD0_i0_i1_i2m1 + AD0_i0_i1_i2p1));\n const double AD_dDD100 = FDPart1_9*(FDPart1_13 + FDPart1_Rational_1_12*(-AD1_i0m2_i1_i2 - AD1_i0p2_i1_i2) + FDPart1_Rational_4_3*(AD1_i0m1_i1_i2 + AD1_i0p1_i1_i2));\n const double AD_dDD111 = FDPart1_11*(FDPart1_13 + FDPart1_Rational_1_12*(-AD1_i0_i1m2_i2 - AD1_i0_i1p2_i2) + FDPart1_Rational_4_3*(AD1_i0_i1m1_i2 + AD1_i0_i1p1_i2));\n const double AD_dDD122 = FDPart1_12*(FDPart1_13 + FDPart1_Rational_1_12*(-AD1_i0_i1_i2m2 - AD1_i0_i1_i2p2) + FDPart1_Rational_4_3*(AD1_i0_i1_i2m1 + AD1_i0_i1_i2p1));\n const double AD_dDD200 = FDPart1_9*(FDPart1_14 + FDPart1_Rational_1_12*(-AD2_i0m2_i1_i2 - AD2_i0p2_i1_i2) + FDPart1_Rational_4_3*(AD2_i0m1_i1_i2 + AD2_i0p1_i1_i2));\n const double AD_dDD211 = FDPart1_11*(FDPart1_14 + FDPart1_Rational_1_12*(-AD2_i0_i1m2_i2 - AD2_i0_i1p2_i2) + FDPart1_Rational_4_3*(AD2_i0_i1m1_i2 + AD2_i0_i1p1_i2));\n const double AD_dDD222 = FDPart1_12*(FDPart1_14 + FDPart1_Rational_1_12*(-AD2_i0_i1_i2m2 - AD2_i0_i1_i2p2) + FDPart1_Rational_4_3*(AD2_i0_i1_i2m1 + AD2_i0_i1_i2p1));\n const double Gamma_dD0 = invdx0*(FDPart1_Rational_1_12*(Gamma_i0m2_i1_i2 - Gamma_i0p2_i1_i2) + FDPart1_Rational_2_3*(-Gamma_i0m1_i1_i2 + Gamma_i0p1_i1_i2));\n const double Gamma_dD1 = invdx1*(FDPart1_Rational_1_12*(Gamma_i0_i1m2_i2 - Gamma_i0_i1p2_i2) + FDPart1_Rational_2_3*(-Gamma_i0_i1m1_i2 + Gamma_i0_i1p1_i2));\n const double Gamma_dD2 = invdx2*(FDPart1_Rational_1_12*(Gamma_i0_i1_i2m2 - Gamma_i0_i1_i2p2) + FDPart1_Rational_2_3*(-Gamma_i0_i1_i2m1 + Gamma_i0_i1_i2p1));\n const double psi_dD0 = invdx0*(FDPart1_Rational_1_12*(psi_i0m2_i1_i2 - psi_i0p2_i1_i2) + FDPart1_Rational_2_3*(-psi_i0m1_i1_i2 + psi_i0p1_i1_i2));\n const double psi_dD1 = invdx1*(FDPart1_Rational_1_12*(psi_i0_i1m2_i2 - psi_i0_i1p2_i2) 
+ FDPart1_Rational_2_3*(-psi_i0_i1m1_i2 + psi_i0_i1p1_i2));\n const double psi_dD2 = invdx2*(FDPart1_Rational_1_12*(psi_i0_i1_i2m2 - psi_i0_i1_i2p2) + FDPart1_Rational_2_3*(-psi_i0_i1_i2m1 + psi_i0_i1_i2p1));\n const double psi_dDD00 = FDPart1_9*(FDPart1_18 + FDPart1_Rational_1_12*(-psi_i0m2_i1_i2 - psi_i0p2_i1_i2) + FDPart1_Rational_4_3*(psi_i0m1_i1_i2 + psi_i0p1_i1_i2));\n const double psi_dDD11 = FDPart1_11*(FDPart1_18 + FDPart1_Rational_1_12*(-psi_i0_i1m2_i2 - psi_i0_i1p2_i2) + FDPart1_Rational_4_3*(psi_i0_i1m1_i2 + psi_i0_i1p1_i2));\n const double psi_dDD22 = FDPart1_12*(FDPart1_18 + FDPart1_Rational_1_12*(-psi_i0_i1_i2m2 - psi_i0_i1_i2p2) + FDPart1_Rational_4_3*(psi_i0_i1_i2m1 + psi_i0_i1_i2p1));\n /*\n * NRPy+ Finite Difference Code Generation, Step 2 of 2: Evaluate SymPy expressions and write to main memory:\n */\n /*\n * Original SymPy expressions:\n * \"[rhs_gfs[IDX4(AD0GF, i0, i1, i2)] = -ED0 - psi_dD0,\n * rhs_gfs[IDX4(ED0GF, i0, i1, i2)] = -AD_dDD000 + Gamma_dD0 - (AD_dD00*xx0 - AD_dD11/xx0 + AD_dDD011 - (AD0*xx0 + AD_dD11)/xx0)/xx0**2 - (AD_dD00*xx0*sin(xx1)**2 - AD_dD22/xx0 + AD_dDD022 + (-AD1/xx0 + AD_dD01)*sin(2*xx1)/2 - (AD0*xx0*sin(xx1)**2 + AD1*sin(2*xx1)/2 + AD_dD22)/xx0)/(xx0**2*sin(xx1)**2),\n * rhs_gfs[IDX4(AD1GF, i0, i1, i2)] = -ED1 - psi_dD1,\n * rhs_gfs[IDX4(ED1GF, i0, i1, i2)] = -AD1/xx0**2 + AD_dD10/xx0 - AD_dDD100 + Gamma_dD1 + (-AD1/xx0 + AD_dD10)/xx0 - (AD_dD01*xx0 + AD_dDD111 + xx0*(-AD1/xx0 + AD_dD01) + xx0*(-AD1/xx0 + AD_dD10))/xx0**2 - (-AD_dD22*sin(2*xx1)/(2*sin(xx1)**2) + AD_dDD122 + xx0*(-AD1/xx0 + AD_dD10)*sin(xx1)**2 + (AD0*xx0 + AD_dD11)*sin(2*xx1)/2 - (AD0*xx0*sin(xx1)**2 + AD1*sin(2*xx1)/2 + AD_dD22)*sin(2*xx1)/(2*sin(xx1)**2))/(xx0**2*sin(xx1)**2),\n * rhs_gfs[IDX4(AD2GF, i0, i1, i2)] = -ED2 - psi_dD2,\n * rhs_gfs[IDX4(ED2GF, i0, i1, i2)] = -AD2/xx0**2 + AD_dD20/xx0 - AD_dDD200 + Gamma_dD2 + (-AD2/xx0 + AD_dD20)/xx0 - (AD2*(-cos(2*xx1)/sin(xx1)**2 + sin(2*xx1)*cos(xx1)/sin(xx1)**3) - AD_dD21*sin(2*xx1)/(2*sin(xx1)**2) + 
AD_dDD211 + xx0*(-AD2/xx0 + AD_dD20) - (-AD2*sin(2*xx1)/(2*sin(xx1)**2) + AD_dD21)*sin(2*xx1)/(2*sin(xx1)**2))/xx0**2 - (AD_dD02*xx0*sin(xx1)**2 + AD_dD12*sin(2*xx1)/2 + AD_dDD222 + xx0*(-AD2/xx0 + AD_dD02)*sin(xx1)**2 + xx0*(-AD2/xx0 + AD_dD20)*sin(xx1)**2 + (-AD2*sin(2*xx1)/(2*sin(xx1)**2) + AD_dD12)*sin(2*xx1)/2 + (-AD2*sin(2*xx1)/(2*sin(xx1)**2) + AD_dD21)*sin(2*xx1)/2)/(xx0**2*sin(xx1)**2),\n * rhs_gfs[IDX4(PSIGF, i0, i1, i2)] = -Gamma,\n * rhs_gfs[IDX4(GAMMAGF, i0, i1, i2)] = -psi_dDD00 - (psi_dD0*xx0 + psi_dDD11)/xx0**2 - (psi_dD0*xx0*sin(xx1)**2 + psi_dD1*sin(2*xx1)/2 + psi_dDD22)/(xx0**2*sin(xx1)**2)]\"\n */\n const double FDPart3_0 = (1.0/((xx0)*(xx0)));\n const double FDPart3_2 = (1.0/(xx0));\n const double FDPart3_4 = AD0*xx0 + AD_dD11;\n const double FDPart3_5 = sin(xx1);\n const double FDPart3_6 = ((FDPart3_5)*(FDPart3_5));\n const double FDPart3_7 = -AD1*FDPart3_2;\n const double FDPart3_10 = sin(2*xx1);\n const double FDPart3_11 = (1.0/2.0)*FDPart3_10;\n const double FDPart3_12 = AD0*FDPart3_6*xx0 + AD1*FDPart3_11 + AD_dD22;\n const double FDPart3_13 = (1.0/(FDPart3_6));\n const double FDPart3_14 = FDPart3_0*FDPart3_13;\n const double FDPart3_16 = xx0*(AD_dD10 + FDPart3_7);\n const double FDPart3_17 = FDPart3_11*FDPart3_13;\n const double FDPart3_18 = -AD2*FDPart3_2;\n const double FDPart3_20 = xx0*(AD_dD20 + FDPart3_18);\n const double FDPart3_21 = -AD2*FDPart3_17;\n const double FDPart3_22 = FDPart3_11*(AD_dD21 + FDPart3_21);\n rhs_gfs[IDX4(AD0GF, i0, i1, i2)] = -ED0 - psi_dD0;\n rhs_gfs[IDX4(ED0GF, i0, i1, i2)] = -AD_dDD000 - FDPart3_0*(AD_dD00*xx0 - AD_dD11*FDPart3_2 + AD_dDD011 - FDPart3_2*FDPart3_4) - FDPart3_14*(AD_dD00*FDPart3_6*xx0 - AD_dD22*FDPart3_2 + AD_dDD022 + FDPart3_11*(AD_dD01 + FDPart3_7) - FDPart3_12*FDPart3_2) + Gamma_dD0;\n rhs_gfs[IDX4(AD1GF, i0, i1, i2)] = -ED1 - psi_dD1;\n rhs_gfs[IDX4(ED1GF, i0, i1, i2)] = -AD1*FDPart3_0 + AD_dD10*FDPart3_2 - AD_dDD100 - FDPart3_0*(AD_dD01*xx0 + AD_dDD111 + FDPart3_16 + xx0*(AD_dD01 + 
FDPart3_7)) - FDPart3_14*(-AD_dD22*FDPart3_17 + AD_dDD122 + FDPart3_11*FDPart3_4 - FDPart3_12*FDPart3_17 + FDPart3_16*FDPart3_6) + FDPart3_2*(AD_dD10 + FDPart3_7) + Gamma_dD1;\n rhs_gfs[IDX4(AD2GF, i0, i1, i2)] = -ED2 - psi_dD2;\n rhs_gfs[IDX4(ED2GF, i0, i1, i2)] = -AD2*FDPart3_0 + AD_dD20*FDPart3_2 - AD_dDD200 - FDPart3_0*(AD2*(FDPart3_10*cos(xx1)/((FDPart3_5)*(FDPart3_5)*(FDPart3_5)) - FDPart3_13*cos(2*xx1)) - AD_dD21*FDPart3_17 + AD_dDD211 - FDPart3_13*FDPart3_22 + FDPart3_20) - FDPart3_14*(AD_dD02*FDPart3_6*xx0 + AD_dD12*FDPart3_11 + AD_dDD222 + FDPart3_11*(AD_dD12 + FDPart3_21) + FDPart3_20*FDPart3_6 + FDPart3_22 + FDPart3_6*xx0*(AD_dD02 + FDPart3_18)) + FDPart3_2*(AD_dD20 + FDPart3_18) + Gamma_dD2;\n rhs_gfs[IDX4(PSIGF, i0, i1, i2)] = -Gamma;\n rhs_gfs[IDX4(GAMMAGF, i0, i1, i2)] = -FDPart3_0*(psi_dD0*xx0 + psi_dDD11) - FDPart3_14*(FDPart3_11*psi_dD1 + FDPart3_6*psi_dD0*xx0 + psi_dDD22) - psi_dDD00;\n}\n\n" ] ], [ [ "<a id='latex_pdf_output'></a>\n\n# Step 3: Output this notebook to $\\LaTeX$-formatted PDF file \\[Back to [top](#toc)\\]\n$$\\label{latex_pdf_output}$$\n\nThe following code cell converts this Jupyter notebook into a proper, clickable $\\LaTeX$-formatted PDF file. After the cell is successfully run, the generated PDF may be found in the root NRPy+ tutorial directory, with filename\n[Tutorial-MaxwellCurvilinear.pdf](Tutorial-MaxwellCurvilinear.pdf) (Note that clicking on this link may not work; you may need to open the PDF file through another means.)", "_____no_output_____" ] ], [ [ "import cmdline_helper as cmd # NRPy+: Multi-platform Python command-line interface\ncmd.output_Jupyter_notebook_to_LaTeXed_PDF(\"Tutorial-MaxwellCurvilinear\")", "Created Tutorial-MaxwellCurvilinear.tex, and compiled LaTeX file to PDF\n file Tutorial-MaxwellCurvilinear.pdf\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4aa63e2a75d73efd7dacaa86653fff4db5e25d1a
58,565
ipynb
Jupyter Notebook
Toxic_Release_Inventory.ipynb
Prags21/study-of-toxic-release-inventory
6398d1d113c298ff9ce4dd1ab01c68a378ed40f7
[ "MIT" ]
null
null
null
Toxic_Release_Inventory.ipynb
Prags21/study-of-toxic-release-inventory
6398d1d113c298ff9ce4dd1ab01c68a378ed40f7
[ "MIT" ]
null
null
null
Toxic_Release_Inventory.ipynb
Prags21/study-of-toxic-release-inventory
6398d1d113c298ff9ce4dd1ab01c68a378ed40f7
[ "MIT" ]
1
2021-11-17T20:41:53.000Z
2021-11-17T20:41:53.000Z
38.913621
277
0.261675
[ [ [ "from google.colab import drive\ndrive.mount('/content/drive')", "Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n" ], [ "!ls \"/content/drive/My Drive/255_Data_Mining/dataset/basic_data_files.csv\"", "'/content/drive/My Drive/255_Data_Mining/dataset/basic_data_files.csv'\n" ] ], [ [ "**Load only first n rows**", "_____no_output_____" ] ], [ [ "import pandas as pd\ndf = pd.read_csv('/content/drive/My Drive/255_Data_Mining/dataset/basic_data_files.csv'\n , sep=','\n , nrows=1000)\n # , chunksize=1000000)\n # , usecols=['YEAR', 'TRI_FACILITY_ID', 'FACILITY_NAME', 'ZIP', 'LATITUDE', 'LONGITUDE', 'PRIMARY_NAICS', 'INDUSTRY_SECTOR', 'CLASSIFICATION', 'CARCINOGEN', '5.1_FUGITIVE_AIR', '5.2_STACK_AIR', '5.3_WATER', 'ON-SITE_RELEASE_TOTAL'])\ndf", "_____no_output_____" ] ], [ [ "**Load all rows with only selected columns**", "_____no_output_____" ] ], [ [ "import pandas as pd\ndf = pd.read_csv('/content/drive/My Drive/255_Data_Mining/dataset/basic_data_files.csv'\n , sep=','\n , usecols=['YEAR', 'TRI_FACILITY_ID', 'FACILITY_NAME', 'ZIP', 'LATITUDE', 'LONGITUDE', 'INDUSTRY_SECTOR', 'CLASSIFICATION', 'METAL', 'METAL_CATEGORY', 'CARCINOGEN', '5.2_STACK_AIR', '5.3_WATER', 'ON-SITE_RELEASE_TOTAL'])\ndf", "/usr/local/lib/python3.7/dist-packages/IPython/core/interactiveshell.py:2718: DtypeWarning: Columns (0,1,3,8,11,12,15,35,36,40,41,53) have mixed types.Specify dtype option on import or set low_memory=False.\n interactivity=interactivity, compiler=compiler, result=result)\n" ], [ "df['METAL_CATEGORY']", "_____no_output_____" ], [ "print(df['YEAR'][23444])\nprint(df['YEAR'][1222538])\nprint(df['YEAR'][5051319])", "2016\n2008\n81082\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
4aa64e90ce03b3a790eae062aac26fbd217ae148
120,097
ipynb
Jupyter Notebook
dcgan-svhn/DCGAN_Exercise.ipynb
issagaliyeva/udacity_deep_learning
ffe8783a3b4f502aa47cab9196be9a4099dfcafd
[ "MIT" ]
null
null
null
dcgan-svhn/DCGAN_Exercise.ipynb
issagaliyeva/udacity_deep_learning
ffe8783a3b4f502aa47cab9196be9a4099dfcafd
[ "MIT" ]
null
null
null
dcgan-svhn/DCGAN_Exercise.ipynb
issagaliyeva/udacity_deep_learning
ffe8783a3b4f502aa47cab9196be9a4099dfcafd
[ "MIT" ]
null
null
null
141.124559
83,652
0.848672
[ [ [ "# Deep Convolutional GANs\n\nIn this notebook, you'll build a GAN using convolutional layers in the generator and discriminator. This is called a Deep Convolutional GAN, or DCGAN for short. The DCGAN architecture was first explored in 2016 and has seen impressive results in generating new images; you can read the [original paper, here](https://arxiv.org/pdf/1511.06434.pdf).\n\nYou'll be training DCGAN on the [Street View House Numbers](http://ufldl.stanford.edu/housenumbers/) (SVHN) dataset. These are color images of house numbers collected from Google street view. SVHN images are in color and much more variable than MNIST. \n\n<img src='assets/svhn_dcgan.png' width=80% />\n\nSo, our goal is to create a DCGAN that can generate new, realistic-looking images of house numbers. We'll go through the following steps to do this:\n* Load in and pre-process the house numbers dataset\n* Define discriminator and generator networks\n* Train these adversarial networks\n* Visualize the loss over time and some sample, generated images\n\n#### Deeper Convolutional Networks\n\nSince this dataset is more complex than our MNIST data, we'll need a deeper network to accurately identify patterns in these images and be able to generate new ones. Specifically, we'll use a series of convolutional or transpose convolutional layers in the discriminator and generator. It's also necessary to use batch normalization to get these convolutional networks to train. \n\nBesides these changes in network structure, training the discriminator and generator networks should be the same as before. 
That is, the discriminator will alternate training on real and fake (generated) images, and the generator will aim to trick the discriminator into thinking that its generated images are real!", "_____no_output_____" ] ], [ [ "# import libraries\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pickle as pkl\n\n%matplotlib inline", "_____no_output_____" ] ], [ [ "## Getting the data\n\nHere you can download the SVHN dataset. It's a dataset built-in to the PyTorch datasets library. We can load in training data, transform it into Tensor datatypes, then create dataloaders to batch our data into a desired size.", "_____no_output_____" ] ], [ [ "import torch\nfrom torchvision import datasets\nfrom torchvision import transforms\n\n# Tensor transform\ntransform = transforms.ToTensor()\n\n# SVHN training datasets\nsvhn_train = datasets.SVHN(root='data/', split='train', download=True, transform=transform)\n\nbatch_size = 128\nnum_workers = 0\n\n# build DataLoaders for SVHN dataset\ntrain_loader = torch.utils.data.DataLoader(dataset=svhn_train,\n batch_size=batch_size,\n shuffle=True,\n num_workers=num_workers)\n", "Downloading http://ufldl.stanford.edu/housenumbers/train_32x32.mat to data/train_32x32.mat\n" ] ], [ [ "### Visualize the Data\n\nHere I'm showing a small sample of the images. Each of these is 32x32 with 3 color channels (RGB). These are the real, training images that we'll pass to the discriminator. 
Notice that each image has _one_ associated, numerical label.", "_____no_output_____" ] ], [ [ "# obtain one batch of training images\ndataiter = iter(train_loader)\nimages, labels = dataiter.next()\n\n# plot the images in the batch, along with the corresponding labels\nfig = plt.figure(figsize=(25, 4))\nplot_size=20\nfor idx in np.arange(plot_size):\n ax = fig.add_subplot(2, plot_size // 2, idx + 1, xticks=[], yticks=[])\n ax.imshow(np.transpose(images[idx], (1, 2, 0)))\n # print out the correct label for each image\n # .item() gets the value contained in a Tensor\n ax.set_title(str(labels[idx].item()))\n\nplt.show()", "_____no_output_____" ] ], [ [ "### Pre-processing: scaling from -1 to 1\n\nWe need to do a bit of pre-processing; we know that the output of our `tanh` activated generator will contain pixel values in a range from -1 to 1, and so, we need to rescale our training images to a range of -1 to 1. (Right now, they are in a range from 0-1.)", "_____no_output_____" ] ], [ [ "# current range\nimg = images[0]\n\nprint('Min: ', img.min())\nprint('Max: ', img.max())", "Min: tensor(0.1412)\nMax: tensor(0.5569)\n" ], [ "# helper scale function\ndef scale(x, feature_range=(-1, 1)):\n ''' Scale takes in an image x and returns that image, scaled\n with a feature_range of pixel values from -1 to 1. \n This function assumes that the input x is already scaled from 0-1.'''\n # assume x is scaled to (0, 1)\n # scale to feature_range and return scaled x\n min, max = feature_range\n return x * (max - min) + min\n ", "_____no_output_____" ], [ "# scaled range\nscaled_img = scale(img)\n\nprint('Scaled min: ', scaled_img.min())\nprint('Scaled max: ', scaled_img.max())", "Scaled min: tensor(-0.7176)\nScaled max: tensor(0.1137)\n" ] ], [ [ "---\n# Define the Model\n\nA GAN is comprised of two adversarial networks, a discriminator and a generator.", "_____no_output_____" ], [ "## Discriminator\n\nHere you'll build the discriminator. 
This is a convolutional classifier like you've built before, only without any maxpooling layers. \n* The inputs to the discriminator are 32x32x3 tensor images\n* You'll want a few convolutional, hidden layers\n* Then a fully connected layer for the output; as before, we want a sigmoid output, but we'll add that in the loss function, [BCEWithLogitsLoss](https://pytorch.org/docs/stable/nn.html#bcewithlogitsloss), later\n\n<img src='assets/conv_discriminator.png' width=80%/>\n\nFor the depths of the convolutional layers I suggest starting with 32 filters in the first layer, then double that depth as you add layers (to 64, 128, etc.). Note that in the DCGAN paper, they did all the downsampling using only strided convolutional layers with no maxpooling layers.\n\nYou'll also want to use batch normalization with [nn.BatchNorm2d](https://pytorch.org/docs/stable/nn.html#batchnorm2d) on each layer **except** the first convolutional layer and final, linear output layer. \n\n#### Helper `conv` function \n\nIn general, each layer should look something like convolution > batch norm > leaky ReLU, and so we'll define a function to put these layers together. This function will create a sequential series of a convolutional + an optional batch norm layer. 
We'll create these using PyTorch's [Sequential container](https://pytorch.org/docs/stable/nn.html#sequential), which takes in a list of layers and creates layers according to the order that they are passed in to the Sequential constructor.\n\nNote: It is also suggested that you use a **kernel_size of 4** and a **stride of 2** for strided convolutions.", "_____no_output_____" ] ], [ [ "import torch.nn as nn\nimport torch.nn.functional as F\n\n# helper conv function\ndef conv(in_channels, out_channels, kernel_size, stride=2, padding=1, batch_norm=True):\n \"\"\"Creates a convolutional layer, with optional batch normalization.\n \"\"\"\n layers = []\n conv_layer = nn.Conv2d(in_channels, out_channels, \n kernel_size, stride, padding, bias=False)\n \n # append conv layer\n layers.append(conv_layer)\n\n if batch_norm:\n # append batchnorm layer\n layers.append(nn.BatchNorm2d(out_channels))\n \n # using Sequential container\n return nn.Sequential(*layers)\n", "_____no_output_____" ], [ "class Discriminator(nn.Module):\n\n def __init__(self, conv_dim=32):\n super(Discriminator, self).__init__()\n\n # complete init function\n # input 32x32x3 -> output 16x16x32\n self.conv1 = conv(3, conv_dim, 4, batch_norm=False) # we don't need batch norm in the first conv layer \n # input 16x16x32 -> output 8x8x64\n self.conv2 = conv(conv_dim, conv_dim * 2, 4)\n # input 8x8x64 -> output 4x4x128\n self.conv3 = conv(conv_dim * 2, conv_dim * 4, 4)\n \n # define a FC layer\n self.fc = nn.Linear(4*4*128, 1)\n self.dropout = nn.Dropout(0.3)\n \n\n def forward(self, x):\n # complete forward function\n x = F.leaky_relu(self.conv1(x), 0.2)\n x = self.dropout(x)\n x = F.leaky_relu(self.conv2(x), 0.2)\n x = self.dropout(x)\n x = F.leaky_relu(self.conv3(x), 0.2)\n x = self.dropout(x)\n \n # flatten the image \n x = x.view(-1, 4*4*128)\n x = self.fc(x)\n \n return x\n ", "_____no_output_____" ] ], [ [ "## Generator\n\nNext, you'll build the generator network. 
The input will be our noise vector `z`, as before. And, the output will be a $tanh$ output, but this time with size 32x32 which is the size of our SVHN images.\n\n<img src='assets/conv_generator.png' width=80% />\n\nWhat's new here is we'll use transpose convolutional layers to create our new images. \n* The first layer is a fully connected layer which is reshaped into a deep and narrow layer, something like 4x4x512. \n* Then, we use batch normalization and a leaky ReLU activation. \n* Next is a series of [transpose convolutional layers](https://pytorch.org/docs/stable/nn.html#convtranspose2d), where you typically halve the depth and double the width and height of the previous layer. \n* And, we'll apply batch normalization and ReLU to all but the last of these hidden layers. Where we will just apply a `tanh` activation.\n\n#### Helper `deconv` function\n\nFor each of these layers, the general scheme is transpose convolution > batch norm > ReLU, and so we'll define a function to put these layers together. This function will create a sequential series of a transpose convolutional + an optional batch norm layer. 
We'll create these using PyTorch's Sequential container, which takes in a list of layers and creates layers according to the order that they are passed in to the Sequential constructor.\n\nNote: It is also suggested that you use a **kernel_size of 4** and a **stride of 2** for transpose convolutions.", "_____no_output_____" ] ], [ [ "# helper deconv function\ndef deconv(in_channels, out_channels, kernel_size, stride=2, padding=1, batch_norm=True):\n \"\"\"Creates a transposed-convolutional layer, with optional batch normalization.\n \"\"\"\n ## TODO: Complete this function\n ## create a sequence of transpose + optional batch norm layers\n layers = []\n conv_transpose = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, bias=False)\n \n # append conv transpose layer\n layers.append(conv_transpose)\n \n if batch_norm:\n layers.append(nn.BatchNorm2d(out_channels))\n \n return nn.Sequential(*layers)\n", "_____no_output_____" ], [ "class Generator(nn.Module):\n \n def __init__(self, z_size, conv_dim=32):\n super(Generator, self).__init__()\n\n # complete init function\n self.fc = nn.Linear(z_size, conv_dim*4*4*4)\n self.conv_trans1 = deconv(conv_dim*4, conv_dim*2, 4)\n self.conv_trans2 = deconv(conv_dim*2, conv_dim, 4)\n self.conv_trans3 = deconv(conv_dim, 3, 4, batch_norm=False)\n \n self.dropout = nn.Dropout(0.3)\n\n def forward(self, x):\n # complete forward function\n x = self.fc(x)\n # reshape\n x = x.view(-1, 4*32, 4, 4)\n \n x = F.relu(self.conv_trans1(x))\n x = F.relu(self.conv_trans2(x))\n x = self.conv_trans3(x)\n out = F.tanh(x)\n \n return out\n ", "_____no_output_____" ] ], [ [ "## Build complete network\n\nDefine your models' hyperparameters and instantiate the discriminator and generator from the classes defined above. 
Make sure you've passed in the correct input arguments.", "_____no_output_____" ] ], [ [ "# define hyperparams\nconv_dim = 32\nz_size = 100\n\n# define discriminator and generator\nD = Discriminator(conv_dim)\nG = Generator(z_size=z_size, conv_dim=conv_dim)\n\nprint(D)\nprint()\nprint(G)", "Discriminator(\n (conv1): Sequential(\n (0): Conv2d(3, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n )\n (conv2): Sequential(\n (0): Conv2d(32, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n )\n (conv3): Sequential(\n (0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n )\n (fc): Linear(in_features=2048, out_features=1, bias=True)\n (dropout): Dropout(p=0.3, inplace=False)\n)\n\nGenerator(\n (fc): Linear(in_features=100, out_features=2048, bias=True)\n (conv_trans1): Sequential(\n (0): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), bias=False)\n (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n )\n (conv_trans2): Sequential(\n (0): ConvTranspose2d(64, 32, kernel_size=(4, 4), stride=(2, 2), bias=False)\n (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n )\n (conv_trans3): Sequential(\n (0): ConvTranspose2d(32, 3, kernel_size=(4, 4), stride=(2, 2), bias=False)\n )\n (dropout): Dropout(p=0.3, inplace=False)\n)\n" ] ], [ [ "### Training on GPU\n\nCheck if you can train on GPU. If you can, set this as a variable and move your models to GPU. \n> Later, we'll also move any inputs our models and loss functions see (real_images, z, and ground truth labels) to GPU as well.", "_____no_output_____" ] ], [ [ "train_on_gpu = torch.cuda.is_available()\n\nif train_on_gpu:\n # move models to GPU\n G.cuda()\n D.cuda()\n print('GPU available for training. 
Models moved to GPU')\nelse:\n print('Training on CPU.')\n ", "Training on CPU.\n" ] ], [ [ "---\n## Discriminator and Generator Losses\n\nNow we need to calculate the losses. And this will be exactly the same as before.\n\n### Discriminator Losses\n\n> * For the discriminator, the total loss is the sum of the losses for real and fake images, `d_loss = d_real_loss + d_fake_loss`. \n* Remember that we want the discriminator to output 1 for real images and 0 for fake images, so we need to set up the losses to reflect that.\n\nThe losses will by binary cross entropy loss with logits, which we can get with [BCEWithLogitsLoss](https://pytorch.org/docs/stable/nn.html#bcewithlogitsloss). This combines a `sigmoid` activation function **and** and binary cross entropy loss in one function.\n\nFor the real images, we want `D(real_images) = 1`. That is, we want the discriminator to classify the real images with a label = 1, indicating that these are real. The discriminator loss for the fake data is similar. We want `D(fake_images) = 0`, where the fake images are the _generator output_, `fake_images = G(z)`. \n\n### Generator Loss\n\nThe generator loss will look similar only with flipped labels. The generator's goal is to get `D(fake_images) = 1`. 
In this case, the labels are **flipped** to represent that the generator is trying to fool the discriminator into thinking that the images it generates (fakes) are real!", "_____no_output_____" ] ], [ [ "def real_loss(D_out, smooth=False):\n batch_size = D_out.size(0)\n # label smoothing\n if smooth:\n # smooth, real labels = 0.9\n labels = torch.ones(batch_size)*0.9\n else:\n labels = torch.ones(batch_size) # real labels = 1\n # move labels to GPU if available \n if train_on_gpu:\n labels = labels.cuda()\n # binary cross entropy with logits loss\n criterion = nn.BCEWithLogitsLoss()\n # calculate loss\n loss = criterion(D_out.squeeze(), labels)\n return loss\n\ndef fake_loss(D_out):\n batch_size = D_out.size(0)\n labels = torch.zeros(batch_size) # fake labels = 0\n if train_on_gpu:\n labels = labels.cuda()\n criterion = nn.BCEWithLogitsLoss()\n # calculate loss\n loss = criterion(D_out.squeeze(), labels)\n return loss", "_____no_output_____" ] ], [ [ "## Optimizers\n\nNot much new here, but notice how I am using a small learning rate and custom parameters for the Adam optimizers, This is based on some research into DCGAN model convergence.\n\n### Hyperparameters\n\nGANs are very sensitive to hyperparameters. A lot of experimentation goes into finding the best hyperparameters such that the generator and discriminator don't overpower each other. Try out your own hyperparameters or read [the DCGAN paper](https://arxiv.org/pdf/1511.06434.pdf) to see what worked for them.", "_____no_output_____" ] ], [ [ "import torch.optim as optim\n\n# params\nlr = 0.0002\nbeta1 = 0.5\nbeta2 = 0.5\n\n# Create optimizers for the discriminator and generator\nd_optimizer = optim.Adam(D.parameters(), lr, [beta1, beta2])\ng_optimizer = optim.Adam(G.parameters(), lr, [beta1, beta2])", "_____no_output_____" ] ], [ [ "---\n## Training\n\nTraining will involve alternating between training the discriminator and the generator. 
We'll use our functions `real_loss` and `fake_loss` to help us calculate the discriminator losses in all of the following cases.\n\n### Discriminator training\n1. Compute the discriminator loss on real, training images \n2. Generate fake images\n3. Compute the discriminator loss on fake, generated images \n4. Add up real and fake loss\n5. Perform backpropagation + an optimization step to update the discriminator's weights\n\n### Generator training\n1. Generate fake images\n2. Compute the discriminator loss on fake images, using **flipped** labels!\n3. Perform backpropagation + an optimization step to update the generator's weights\n\n#### Saving Samples\n\nAs we train, we'll also print out some loss statistics and save some generated \"fake\" samples.\n\n**Evaluation mode**\n\nNotice that, when we call our generator to create the samples to display, we set our model to evaluation mode: `G.eval()`. That's so the batch normalization layers will use the population statistics rather than the batch statistics (as they do during training), *and* so dropout layers will operate in eval() mode; not turning off any nodes for generating samples.", "_____no_output_____" ] ], [ [ "import pickle as pkl\n\n# training hyperparams\nnum_epochs = 30\n\n# keep track of loss and generated, \"fake\" samples\nsamples = []\nlosses = []\n\nprint_every = 300\n\n# Get some fixed data for sampling. 
These are images that are held\n# constant throughout training, and allow us to inspect the model's performance\nsample_size=16\nfixed_z = np.random.uniform(-1, 1, size=(sample_size, z_size))\nfixed_z = torch.from_numpy(fixed_z).float()\n\n# train the network\nfor epoch in range(num_epochs):\n \n for batch_i, (real_images, _) in enumerate(train_loader):\n \n batch_size = real_images.size(0)\n \n # important rescaling step\n real_images = scale(real_images)\n \n # ============================================\n # TRAIN THE DISCRIMINATOR\n # ============================================\n \n d_optimizer.zero_grad()\n \n # 1. Train with real images\n\n # Compute the discriminator losses on real images \n if train_on_gpu:\n real_images = real_images.cuda()\n \n real_images = scale(real_images)\n D_real = D(real_images)\n d_real_loss = real_loss(D_real)\n \n # 2. Train with fake images\n \n # Generate fake images\n z = np.random.uniform(-1, 1, size=(batch_size, z_size))\n z = torch.from_numpy(z).float()\n # move x to GPU, if available\n if train_on_gpu:\n z = z.cuda()\n fake_images = G(z)\n \n # Compute the discriminator losses on fake images \n D_fake = D(fake_images)\n d_fake_loss = fake_loss(D_fake)\n \n # add up loss and perform backprop\n d_loss = d_real_loss + d_fake_loss\n d_loss.backward()\n d_optimizer.step()\n \n \n # =========================================\n # TRAIN THE GENERATOR\n # =========================================\n g_optimizer.zero_grad()\n \n # 1. 
Train with fake images and flipped labels\n \n # Generate fake images\n z = np.random.uniform(-1, 1, size=(batch_size, z_size))\n z = torch.from_numpy(z).float()\n if train_on_gpu:\n z = z.cuda()\n fake_images = G(z)\n \n # Compute the discriminator losses on fake images \n # using flipped labels!\n D_fake = D(fake_images)\n g_loss = real_loss(D_fake) # use real loss to flip labels\n \n # perform backprop\n g_loss.backward()\n g_optimizer.step()\n\n # Print some loss stats\n if batch_i % print_every == 0:\n # append discriminator loss and generator loss\n losses.append((d_loss.item(), g_loss.item()))\n # print discriminator and generator loss\n print('Epoch [{:5d}/{:5d}] | d_loss: {:6.4f} | g_loss: {:6.4f}'.format(\n epoch+1, num_epochs, d_loss.item(), g_loss.item()))\n\n \n ## AFTER EACH EPOCH## \n # generate and save sample, fake images\n G.eval() # for generating samples\n if train_on_gpu:\n fixed_z = fixed_z.cuda()\n samples_z = G(fixed_z)\n samples.append(samples_z)\n G.train() # back to training mode\n\n\n# Save training generator samples\nwith open('train_samples.pkl', 'wb') as f:\n pkl.dump(samples, f)", "c:\\users\\dinar\\appdata\\local\\programs\\python\\python38\\lib\\site-packages\\torch\\nn\\functional.py:1794: UserWarning: nn.functional.tanh is deprecated. Use torch.tanh instead.\n warnings.warn(\"nn.functional.tanh is deprecated. Use torch.tanh instead.\")\n" ] ], [ [ "## Training loss\n\nHere we'll plot the training losses for the generator and discriminator, recorded after each epoch.", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots()\nlosses = np.array(losses)\nplt.plot(losses.T[0], label='Discriminator', alpha=0.5)\nplt.plot(losses.T[1], label='Generator', alpha=0.5)\nplt.title(\"Training Losses\")\nplt.legend()", "_____no_output_____" ] ], [ [ "## Generator samples from training\n\nHere we can view samples of images from the generator. 
We'll look at the images we saved during training.", "_____no_output_____" ] ], [ [ "# helper function for viewing a list of passed in sample images\ndef view_samples(epoch, samples):\n fig, axes = plt.subplots(figsize=(16,4), nrows=2, ncols=8, sharey=True, sharex=True)\n for ax, img in zip(axes.flatten(), samples[epoch]):\n img = img.detach().cpu().numpy()\n img = np.transpose(img, (1, 2, 0))\n img = ((img +1)*255 / (2)).astype(np.uint8) # rescale to pixel range (0-255)\n ax.xaxis.set_visible(False)\n ax.yaxis.set_visible(False)\n im = ax.imshow(img.reshape((32,32,3)))", "_____no_output_____" ], [ "_ = view_samples(-1, samples)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
4aa656ff7416a761b02cafeb6233d376b24a2a7d
5,177
ipynb
Jupyter Notebook
notebooks/chapter15_symbolic/04_stats.ipynb
khanparwaz/PythonProjects
3f5c7bf7780b235ad45f8d3f7dd5b05d6b382a2d
[ "BSD-2-Clause" ]
820
2015-01-01T18:15:54.000Z
2022-03-06T16:15:07.000Z
notebooks/chapter15_symbolic/04_stats.ipynb
khanparwaz/PythonProjects
3f5c7bf7780b235ad45f8d3f7dd5b05d6b382a2d
[ "BSD-2-Clause" ]
31
2015-02-25T22:08:09.000Z
2018-09-28T08:41:38.000Z
notebooks/chapter15_symbolic/04_stats.ipynb
khanparwaz/PythonProjects
3f5c7bf7780b235ad45f8d3f7dd5b05d6b382a2d
[ "BSD-2-Clause" ]
483
2015-01-02T13:53:11.000Z
2022-03-18T21:05:16.000Z
18.423488
190
0.514777
[ [ [ "> This is one of the 100 recipes of the [IPython Cookbook](http://ipython-books.github.io/), the definitive guide to high-performance scientific computing and data science in Python.\n", "_____no_output_____" ], [ "# 15.4. Computing exact probabilities and manipulating random variables", "_____no_output_____" ] ], [ [ "from sympy import *\nfrom sympy.stats import *\ninit_printing()", "_____no_output_____" ] ], [ [ "## Rolling dice", "_____no_output_____" ], [ "Let's roll two dices X and Y.", "_____no_output_____" ] ], [ [ "X, Y = Die('X', 6), Die('Y', 6)", "_____no_output_____" ] ], [ [ "We can compute probabilities defined by equalities (with the Eq operator) or inequalities...", "_____no_output_____" ] ], [ [ "P(Eq(X, 3))", "_____no_output_____" ], [ "P(X>3)", "_____no_output_____" ] ], [ [ "Conditions can also involve multiple random variables...", "_____no_output_____" ] ], [ [ "P(X>Y)", "_____no_output_____" ] ], [ [ "Conditional probabilities...", "_____no_output_____" ] ], [ [ "P(X+Y>6, X<5)", "_____no_output_____" ] ], [ [ "## Continuous random variables", "_____no_output_____" ], [ "We can also work with arbitrary discrete or continuous random variables.", "_____no_output_____" ] ], [ [ "Z = Normal('Z', 0, 1) # Gaussian variable", "_____no_output_____" ], [ "P(Z>pi)", "_____no_output_____" ] ], [ [ "We can compute expectancies and variances...", "_____no_output_____" ] ], [ [ "E(Z**2), variance(Z**2)", "_____no_output_____" ] ], [ [ "as well as densities.", "_____no_output_____" ] ], [ [ "f = density(Z)", "_____no_output_____" ] ], [ [ "This is a lambda function, it can be evaluated on a SymPy symbol:", "_____no_output_____" ] ], [ [ "var('x')\nf(x)", "_____no_output_____" ] ], [ [ "We can plot this density.", "_____no_output_____" ] ], [ [ "%matplotlib inline\nplot(f(x), (x, -6, 6));", "_____no_output_____" ] ], [ [ "SymPy.stats works by using integrals and summations for computing probabilistic quantities. 
For example, P(Z>pi) is:", "_____no_output_____" ] ], [ [ "Eq(Integral(f(x), (x, pi, oo)), \n simplify(integrate(f(x), (x, pi, oo))))", "_____no_output_____" ] ], [ [ "> You'll find all the explanations, figures, references, and much more in the book (to be released later this summer).\n\n> [IPython Cookbook](http://ipython-books.github.io/), by [Cyrille Rossant](http://cyrille.rossant.net), Packt Publishing, 2014 (500 pages).", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
4aa65aa4248bdd5ff83ff0dc0ffed75f370cc402
58,282
ipynb
Jupyter Notebook
MNIST-sign-language-detector/sign-language-detector.ipynb
vaishnav-197/Pytorch-In-Real-Life
e79bb1686ae2959eaaf9ae7ed0326c198255557a
[ "MIT" ]
52
2019-09-25T05:39:26.000Z
2022-01-30T22:38:00.000Z
MNIST-sign-language-detector/sign-language-detector.ipynb
vaishnav-197/Pytorch-In-Real-Life
e79bb1686ae2959eaaf9ae7ed0326c198255557a
[ "MIT" ]
1
2020-10-05T15:40:43.000Z
2020-10-05T16:17:05.000Z
MNIST-sign-language-detector/sign-language-detector.ipynb
vaishnav-197/Pytorch-In-Real-Life
e79bb1686ae2959eaaf9ae7ed0326c198255557a
[ "MIT" ]
32
2019-09-25T06:46:45.000Z
2022-03-26T12:41:13.000Z
133.063927
40,948
0.850108
[ [ [ "import numpy as np\nimport pandas as pd\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom torch.autograd import Variable\n\nimport matplotlib.pyplot as plt\n%matplotlib inline", "_____no_output_____" ], [ "data_raw = pd.read_csv('../input/sign_mnist_train.csv', sep=\",\")\ntest_data_raw = pd.read_csv('../input/sign_mnist_test.csv', sep=\",\")", "_____no_output_____" ], [ "labels = data_raw['label']\ndata_raw.drop('label', axis=1, inplace=True)\nlabels_test = test_data_raw['label']\ntest_data_raw.drop('label', axis=1, inplace=True)", "_____no_output_____" ], [ "data = data_raw.values\nlabels = labels.values\n\ntest_data = test_data_raw.values\nlabels_test = labels_test.values", "_____no_output_____" ], [ "\npixels = data[10].reshape(28, 28)\nplt.subplot(221)\nsns.heatmap(data=pixels)\n\npixels = data[12].reshape(28, 28)\nplt.subplot(222)\nsns.heatmap(data=pixels)\n\npixels = data[20].reshape(28, 28)\nplt.subplot(223)\nsns.heatmap(data=pixels)\n\npixels = data[32].reshape(28, 28)\nplt.subplot(224)\nsns.heatmap(data=pixels)", "_____no_output_____" ], [ "reshaped = []\nfor i in data:\n reshaped.append(i.reshape(1, 28, 28))\ndata = np.array(reshaped)\n\nreshaped_test = []\nfor i in test_data:\n reshaped_test.append(i.reshape(1,28,28))\ntest_data = np.array(reshaped_test)", "_____no_output_____" ], [ "x = torch.FloatTensor(data)\ny = torch.LongTensor(labels.tolist())\n\ntest_x = torch.FloatTensor(test_data)\ntest_y = torch.LongTensor(labels_test.tolist())", "_____no_output_____" ], [ "class Network(nn.Module): \n \n def __init__(self):\n super(Network, self).__init__()\n self.conv1 = nn.Conv2d(1, 10, 3)\n self.pool1 = nn.MaxPool2d(2)\n \n self.conv2 = nn.Conv2d(10, 20, 3)\n self.pool2 = nn.MaxPool2d(2)\n \n self.conv3 = nn.Conv2d(20, 30, 3) \n self.dropout1 = nn.Dropout2d()\n \n self.fc3 = nn.Linear(30 * 3 * 3, 270) \n self.fc4 = nn.Linear(270, 26)\n \n self.softmax = nn.LogSoftmax(dim=1)\n \n \n def 
forward(self, x):\n x = self.conv1(x)\n x = F.relu(x)\n x = self.pool1(x)\n \n x = self.conv2(x)\n x = F.relu(x)\n x = self.pool2(x)\n \n x = self.conv3(x)\n x = F.relu(x)\n x = self.dropout1(x)\n \n x = x.view(-1, 30 * 3 * 3) \n x = F.relu(self.fc3(x))\n x = F.relu(self.fc4(x))\n \n return self.softmax(x)\n \n def test(self, predictions, labels):\n \n self.eval()\n correct = 0\n for p, l in zip(predictions, labels):\n if p == l:\n correct += 1\n \n acc = correct / len(predictions)\n print(\"Correct predictions: %5d / %5d (%5f)\" % (correct, len(predictions), acc))\n \n def evaluate(self, predictions, labels):\n \n correct = 0\n for p, l in zip(predictions, labels):\n if p == l:\n correct += 1\n \n acc = correct / len(predictions)\n return(acc)", "_____no_output_____" ], [ "!pip install torchsummary\nfrom torchsummary import summary\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")", "Collecting torchsummary\r\n Downloading https://files.pythonhosted.org/packages/7d/18/1474d06f721b86e6a9b9d7392ad68bed711a02f3b61ac43f13c719db50a6/torchsummary-1.5.1-py3-none-any.whl\r\nInstalling collected packages: torchsummary\r\nSuccessfully installed torchsummary-1.5.1\r\n\u001b[33mYou are using pip version 19.0.3, however version 19.1.1 is available.\r\nYou should consider upgrading via the 'pip install --upgrade pip' command.\u001b[0m\r\n" ], [ "model = Network().to(device)\nsummary(model, (1, 28, 28))", "----------------------------------------------------------------\n Layer (type) Output Shape Param #\n================================================================\n Conv2d-1 [-1, 10, 26, 26] 100\n MaxPool2d-2 [-1, 10, 13, 13] 0\n Conv2d-3 [-1, 20, 11, 11] 1,820\n MaxPool2d-4 [-1, 20, 5, 5] 0\n Conv2d-5 [-1, 30, 3, 3] 5,430\n Dropout2d-6 [-1, 30, 3, 3] 0\n Linear-7 [-1, 270] 73,170\n Linear-8 [-1, 26] 7,046\n LogSoftmax-9 [-1, 26] 0\n================================================================\nTotal params: 87,566\nTrainable params: 
87,566\nNon-trainable params: 0\n----------------------------------------------------------------\nInput size (MB): 0.00\nForward/backward pass size (MB): 0.09\nParams size (MB): 0.33\nEstimated Total Size (MB): 0.43\n----------------------------------------------------------------\n" ], [ "net = Network()\n\noptimizer = optim.SGD(net.parameters(),0.001, momentum=0.7)\nloss_func = nn.CrossEntropyLoss()", "_____no_output_____" ], [ "loss_log = []\nacc_log = []\n\nfor e in range(50):\n for i in range(0, x.shape[0], 100):\n x_mini = x[i:i + 100] \n y_mini = y[i:i + 100] \n \n optimizer.zero_grad()\n net_out = net(Variable(x_mini))\n \n loss = loss_func(net_out, Variable(y_mini))\n loss.backward()\n optimizer.step()\n \n if i % 1000 == 0:\n #pred = net(Variable(test_data_formated))\n loss_log.append(loss.item())\n acc_log.append(net.evaluate(torch.max(net(Variable(test_x[:500])).data, 1)[1], test_y[:500]))\n \n print('Epoch: {} - Loss: {:.6f}'.format(e + 1, loss.item()))", "Epoch: 1 - Loss: 2.778113\nEpoch: 2 - Loss: 1.506864\nEpoch: 3 - Loss: 1.207511\nEpoch: 4 - Loss: 1.101670\nEpoch: 5 - Loss: 0.797975\nEpoch: 6 - Loss: 0.557896\nEpoch: 7 - Loss: 0.557907\nEpoch: 8 - Loss: 0.614582\nEpoch: 9 - Loss: 0.325634\nEpoch: 10 - Loss: 0.363065\nEpoch: 11 - Loss: 0.443552\nEpoch: 12 - Loss: 0.479769\nEpoch: 13 - Loss: 0.118218\nEpoch: 14 - Loss: 0.192110\nEpoch: 15 - Loss: 0.244929\nEpoch: 16 - Loss: 0.228905\nEpoch: 17 - Loss: 0.280728\nEpoch: 18 - Loss: 0.125452\nEpoch: 19 - Loss: 0.212401\nEpoch: 20 - Loss: 0.094986\nEpoch: 21 - Loss: 0.091961\nEpoch: 22 - Loss: 0.104891\nEpoch: 23 - Loss: 0.133923\nEpoch: 24 - Loss: 0.121697\nEpoch: 25 - Loss: 0.073409\nEpoch: 26 - Loss: 0.090660\nEpoch: 27 - Loss: 0.046819\nEpoch: 28 - Loss: 0.068359\nEpoch: 29 - Loss: 0.146648\nEpoch: 30 - Loss: 0.149475\nEpoch: 31 - Loss: 0.072113\nEpoch: 32 - Loss: 0.122548\nEpoch: 33 - Loss: 0.089070\nEpoch: 34 - Loss: 0.177833\nEpoch: 35 - Loss: 0.014688\nEpoch: 36 - Loss: 0.100387\nEpoch: 37 - 
Loss: 0.134296\nEpoch: 38 - Loss: 0.050925\nEpoch: 39 - Loss: 0.013028\nEpoch: 40 - Loss: 0.039003\nEpoch: 41 - Loss: 0.029252\nEpoch: 42 - Loss: 0.042314\nEpoch: 43 - Loss: 0.048494\nEpoch: 44 - Loss: 0.041367\nEpoch: 45 - Loss: 0.127981\nEpoch: 46 - Loss: 0.026515\nEpoch: 47 - Loss: 0.039231\nEpoch: 48 - Loss: 0.015036\nEpoch: 49 - Loss: 0.045324\nEpoch: 50 - Loss: 0.099188\n" ], [ "plt.figure(figsize=(10,8))\nplt.plot(loss_log[2:])\nplt.plot(acc_log)\nplt.plot(np.ones(len(acc_log)), linestyle='dashed')\nplt.show()", "_____no_output_____" ], [ "predictions = net(Variable(test_x))\nnet.test(torch.max(predictions.data, 1)[1], test_y)", "Correct predictions: 6120 / 7172 (0.853318)\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4aa6669cec2b29d17e4cbd05534e87db66f774bd
25,490
ipynb
Jupyter Notebook
g2-p9-RemuestreoBootstrap.ipynb
candelac/astrometria-g2
a5bd904c283f9d6dff536569747e4cb6d3eca1dc
[ "MIT" ]
1
2020-10-07T13:47:18.000Z
2020-10-07T13:47:18.000Z
g2-p9-RemuestreoBootstrap.ipynb
candelac/astrometria-g2
a5bd904c283f9d6dff536569747e4cb6d3eca1dc
[ "MIT" ]
null
null
null
g2-p9-RemuestreoBootstrap.ipynb
candelac/astrometria-g2
a5bd904c283f9d6dff536569747e4cb6d3eca1dc
[ "MIT" ]
null
null
null
75.637982
14,516
0.804512
[ [ [ "# Remuestreo Bootstrap", "_____no_output_____" ], [ "Entre los métodos inferenciales que permiten cuantificar el grado de confianza que se puede tener de un estadı́sitico, y saber cuán acertados son los resultados sobre los parámetros de la población, se encuentran las técnias de remuestreo.\n\nEstas técnicas tienen la ventaja de que no necesitan datos distribuidos normalmente, muestras muy grandes y fórmulas complicadas. Además permiten obtener resultados muchas veces más exactos que otros métodos.\n\n\nEl bootstrap es un mecanismo que se centra en el remuestreo de datos dentro de una muestra aleatoria, diseñado para aproximar la precisión de un estimador.\n\n\nEl método se basa en: dada una muestra aleatoria con 'n' observaciones, se construyen con ella 'B' \"muestras Bootstrap\" del mismo tamaño con reposición (es decir los valores se pueden repeitir).\nPara cada una de las B nuevas muestras, se realiza una estimación del parámetro de interés $\\theta$.\nLuego, se usan los B valores bootstrap estimados para aproximar la distribución del estimador del parámetro.\nEsta distribución se utiliza para hacer más inferencias estadísticas, como la estimación del error estándar de $\\theta$ o un intervalo de confianza para el mismo.\n", "_____no_output_____" ], [ "EL intervalo de confianza que se calcula a partir de los datos de la muestra, es un intervalo en donde se estima que estará cierto valor desconocido, como el parámtero poblacional, con un determinado nivel de confianza.Se denomina nivel de significancia a $\\alpha$ y representa la probabilidad de que el intervalo contenga el parámetro poblacional.", "_____no_output_____" ], [ "\n", "_____no_output_____" ], [ "En este ejercicio se quiere diseñar una función que por medio del método de boostrap resampling estime la varianza de una V.A. a partir de una muestra de datos. 
Se toma como 'muestra' a las magnitudes de estrellas pertenecientes a cúmulos globulares los cuales se encuentran en la columna número 6 (contando desde cero) del archivo 'cumulos_globulares.dat'.\n\nPrimero para estimar la varianza, se calcula la varianza muestral.\n", "_____no_output_____" ] ], [ [ "from math import *\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport random\nimport seaborn as sns", "_____no_output_____" ], [ "sns.set()", "_____no_output_____" ], [ "muestra = np.genfromtxt('cumulos_globulares.dat', usecols=6) #se carga el archivo\nmuestra = muestra[~np.isnan(muestra)] #tiene NaNs, así que usa solo los numéricos.\nn=len(muestra) #defino n como el tamaño de la muestra\n\nxm= sum(muestra)/n #Calculo la media muestral\ns2= sum((muestra-xm)**2)/(n-1) #Calculo varianza muestral\nprint('Varianza muestral:', s2)", "Varianza muestral: 2.225189740761169\n" ] ], [ [ "\n", "_____no_output_____" ], [ "A continuación, se realizan remuestreos para aplicar el método de bootstrap y calcular el intervalo de confianza.\n\nSe define la función 'boot' que realiza realiza 'B' muestras nuevas aleatorias del mismo tamaño que la original utilizando la función 'np.random.choice'. 
Para cada muestra se calcula la varianza muestral y se guardan en una lista.\n \nAbajo se grafica la distribución obtenida para la varianza para verla visualmente.", "_____no_output_____" ] ], [ [ "def boot(muestra, n, B=1000): #defino función con B=cantidad de muestras bootstraps\n var_mues=[]\n for i in range(B):\n muestra_nueva=np.random.choice(muestra, size=n) #genera una muestra aleatoria a partir de un array de tamaño n\n xm= sum(muestra_nueva)/n #calculo media muestral\n s2= sum((muestra_nueva-xm)**2)/(n-1) #calculo varianza muestral\n var_mues.append(s2) \n return var_mues\n\n#Grafico el histograma de las varianzas calculadas\nvar = boot(muestra, n) # varianzas muestrales de las distintas muestras\nplt.hist(var, color='gold')\nplt.title('Distribución muestral de la varianza')\nplt.xlabel('$S^2$')\nplt.ylabel('Frecuencia absoluta')\nplt.show()", "_____no_output_____" ] ], [ [ "A continuación, se quiere calcular los intervalos de confidencia del estimador de la varianza con un nivel de significancia $\\alpha$ dado. El intervalo de confianza va a estar definido entre los valores $(q_1, q_2)$, tal que el área bajo la curva de la distribución encerrada entre ellos es igual a $\\alpha$.\n\nComo en el histograma formado para la varianza se ve que la distribución que se forma es simétrica, se pide que el intervalo de confianza sea simétrico. 
Por lo tanto, las colas de la distribución (es decir $S^2<q_1$ y $S^2>q_2$), van a tener un área bajo la curva de valor $\\frac{1-\\alpha}{2}$ cada una.\nLuego, se buscan los valores de $q_1$ y $q_2$ que cumplan con lo siguiente:\n\n$$\\frac{N(S^2<q_1)}{B}=\\frac{1-\\alpha}{2}$$\n\n$$\\frac{N(S^2>q_2)}{B}=\\frac{1-\\alpha}{2}$$\n\ndonde N() indica el número de valores de $S^2$ que cumplen esa codición.", "_____no_output_____" ], [ "Programa para calcular q1:", "_____no_output_____" ] ], [ [ "def IC_q1(var, a): #a es alpha \n var.sort() #ordeno los valores de menor a mayor\n suma=0\n y=(1-a)/2 #condición que quiero que se cumpla\n for i in range(len(var)):\n x=var[i] #defino como x el elemento i de la varianza\n suma=suma+x #los sumo\n t=suma/(len(var)) #divido por la cantidad de muestras\n if t<= y:\n None\n else:\n q1=x\n break\n return q1", "_____no_output_____" ] ], [ [ "Programa para calcular q2:", "_____no_output_____" ] ], [ [ "def IC_q2(var, a): \n var.sort(reverse=True) #ordeno los valores de mayor a menor\n suma=0\n y=(1-a)/2 \n for i in range(len(var)):\n x=var[i] \n suma=suma+x \n t=suma/(len(var)) \n if t<= y:\n None\n else:\n q2=x\n break\n return q2", "_____no_output_____" ] ], [ [ "\n", "_____no_output_____" ], [ "Como ejemplo, se toma el valor de $\\alpha$=0.95 y 0.9 para computar el valor final obtenido para la varianza con su intervalo de confianza.", "_____no_output_____" ] ], [ [ "q1=IC_q1(var, a=0.95)\nprint('Valor de q1=', q1)\n\nq2=IC_q2(var, a=0.95)\nprint('Valor de q2=', q2)\n\nprint('El valor que se obtiene para la varianza es ', s2, 'con un intervalo de confianza de (', q1, ',', q2,').')", "Valor de q1= 1.5683505561684126\nValor de q2= 2.981743348961207\nEl valor que se obtiene para la varianza es 2.225189740761169 con un intervalo de confianza de ( 1.5683505561684126 , 2.981743348961207 ).\n" ], [ "q1=IC_q1(var, a=0.9)\nprint('Valor de q1=', q1)\n\nq2=IC_q2(var, a=0.9)\nprint('Valor de q2=', q2)\n\nprint('El valor que se obtiene para la 
varianza es ', s2, 'con un intervalo de confianza de (', q1, ',', q2,').')", "Valor de q1= 1.6623962998712989\nValor de q2= 2.938861900165473\nEl valor que se obtiene para la varianza es 2.225189740761169 con un intervalo de confianza de ( 1.6623962998712989 , 2.938861900165473 ).\n" ] ], [ [ "\n", "_____no_output_____" ], [ "\n", "_____no_output_____" ], [ "## Conclusiones\n\nPor medio del método de remuestreo bootstrap se puede conocer la varianza de una variable aleatoria y una estimación de su incerteza de la cual no se tiene conocimiento sobre su distribución. Además se puede calcular un intervalo de confianza para un determinado valor de $\\alpha$ mediante el calculo de los límites inferiores y superiores del intervalo.\n\nSe puede ver que la distribución de la varianza tiene forma de campana centrada en el valor estimado de la varianza muestral, por lo que el intervalo de confianza es simétrico.\n\nTambién se ve, con los últimos ejemplos que si el valor de $\\alpha$ decrece, el IC también.\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ] ]
4aa668fc2c1865afd8ebeb82882552069c9dbd10
61,215
ipynb
Jupyter Notebook
notebooks/Clustering - K means.ipynb
ELC/ML-Tutorial
05d4d4e424976b245fa6bf05b60dfc90109e3782
[ "MIT" ]
null
null
null
notebooks/Clustering - K means.ipynb
ELC/ML-Tutorial
05d4d4e424976b245fa6bf05b60dfc90109e3782
[ "MIT" ]
null
null
null
notebooks/Clustering - K means.ipynb
ELC/ML-Tutorial
05d4d4e424976b245fa6bf05b60dfc90109e3782
[ "MIT" ]
null
null
null
57.156863
9,572
0.704305
[ [ [ "# Table of Contents\n <p><div class=\"lev1 toc-item\"><a href=\"#Initialization\" data-toc-modified-id=\"Initialization-1\"><span class=\"toc-item-num\">1&nbsp;&nbsp;</span>Initialization</a></div><div class=\"lev1 toc-item\"><a href=\"#Load-Data\" data-toc-modified-id=\"Load-Data-2\"><span class=\"toc-item-num\">2&nbsp;&nbsp;</span>Load Data</a></div><div class=\"lev1 toc-item\"><a href=\"#Raw-KMeans\" data-toc-modified-id=\"Raw-KMeans-3\"><span class=\"toc-item-num\">3&nbsp;&nbsp;</span>Raw KMeans</a></div><div class=\"lev2 toc-item\"><a href=\"#Averaging\" data-toc-modified-id=\"Averaging-31\"><span class=\"toc-item-num\">3.1&nbsp;&nbsp;</span>Averaging</a></div><div class=\"lev1 toc-item\"><a href=\"#Testing-All-Columns-Combinations\" data-toc-modified-id=\"Testing-All-Columns-Combinations-4\"><span class=\"toc-item-num\">4&nbsp;&nbsp;</span>Testing All Columns Combinations</a></div><div class=\"lev1 toc-item\"><a href=\"#Manual\" data-toc-modified-id=\"Manual-5\"><span class=\"toc-item-num\">5&nbsp;&nbsp;</span>Manual</a></div><div class=\"lev2 toc-item\"><a href=\"#Creating-Dataset\" data-toc-modified-id=\"Creating-Dataset-51\"><span class=\"toc-item-num\">5.1&nbsp;&nbsp;</span>Creating Dataset</a></div><div class=\"lev2 toc-item\"><a href=\"#Final-Result\" data-toc-modified-id=\"Final-Result-52\"><span class=\"toc-item-num\">5.2&nbsp;&nbsp;</span>Final Result</a></div><div class=\"lev2 toc-item\"><a href=\"#Evolution-of-Centroids\" data-toc-modified-id=\"Evolution-of-Centroids-53\"><span class=\"toc-item-num\">5.3&nbsp;&nbsp;</span>Evolution of Centroids</a></div><div class=\"lev1 toc-item\"><a href=\"#Compare-Manual-with-Sci-Kit-Learn\" data-toc-modified-id=\"Compare-Manual-with-Sci-Kit-Learn-6\"><span class=\"toc-item-num\">6&nbsp;&nbsp;</span>Compare Manual with Sci Kit Learn</a></div><div class=\"lev2 toc-item\"><a href=\"#Comparing-Speed\" data-toc-modified-id=\"Comparing-Speed-61\"><span class=\"toc-item-num\">6.1&nbsp;&nbsp;</span>Comparing 
Speed</a></div><div class=\"lev3 toc-item\"><a href=\"#Sci-Kit-Learn\" data-toc-modified-id=\"Sci-Kit-Learn-611\"><span class=\"toc-item-num\">6.1.1&nbsp;&nbsp;</span>Sci-Kit Learn</a></div><div class=\"lev3 toc-item\"><a href=\"#Manual\" data-toc-modified-id=\"Manual-612\"><span class=\"toc-item-num\">6.1.2&nbsp;&nbsp;</span>Manual</a></div><div class=\"lev2 toc-item\"><a href=\"#Comparing-Accuracy\" data-toc-modified-id=\"Comparing-Accuracy-62\"><span class=\"toc-item-num\">6.2&nbsp;&nbsp;</span>Comparing Accuracy</a></div>", "_____no_output_____" ], [ "# Initialization", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom sklearn.cluster import KMeans\nfrom sklearn import model_selection, preprocessing\nfrom itertools import combinations, chain\nfrom matplotlib import animation, rc\nimport matplotlib as mpl\nfrom IPython.display import HTML\nfrom collections import defaultdict\nfrom random import shuffle\n%matplotlib inline\nmpl.rcParams['figure.figsize'] = (9,9)\nrc('animation', html='html5')", "_____no_output_____" ] ], [ [ "# Load Data", "_____no_output_____" ] ], [ [ "df = pd.read_excel('../data/titanic.xls')\ndf = df.fillna(0)", "_____no_output_____" ], [ "df.tail()", "_____no_output_____" ] ], [ [ "# Raw KMeans ", "_____no_output_____" ] ], [ [ "dropped_columns = ['name', 'survived', 'body', 'ticket', 'home.dest', 'cabin']\n\ntext_columns = [col for col in df.columns if df[col].dtype not in [np.int64, np.float64]]\ndummy_columns = [col for col in text_columns if col not in dropped_columns]", "_____no_output_____" ], [ "X_df = df.drop(dropped_columns, 1)\nX_df = pd.get_dummies(X_df, columns=dummy_columns, drop_first=True)\n\nX = np.array(X_df).astype(float)\nX = preprocessing.scale(X)\n\ny = np.array(df['survived']).reshape(-1, 1)", "_____no_output_____" ], [ "def KMeans_process(X, y, call=KMeans):\n clf = call(n_clusters=2)\n clf.fit(X)\n correct = 0\n for x_i, y_i in zip(X, y):\n predict_me = 
np.array(x_i).reshape(1, -1)\n prediction = clf.predict(predict_me)\n if prediction == y_i:\n correct += 1\n acc = correct / len(X)\n return acc if acc > 0.5 else 1- acc", "_____no_output_____" ], [ "acc = KMeans_process(X, y)\nprint(f'One Shot Accuracy: {round(acc * 100, 2)}%')", "One Shot Accuracy: 61.57%\n" ] ], [ [ "## Averaging", "_____no_output_____" ] ], [ [ "total = 0\nn = 10\n\nfor i in range(n):\n total += process(X, y)\n \nprint(f'Average Accuracy: {round(total / n * 100, 2)}%')", "Average Accuracy: 67.02%\n" ] ], [ [ "# Testing All Columns Combinations ", "_____no_output_____" ] ], [ [ "a = (combinations(df.columns, i) for i in range(len(df.columns)))\nall_posibilities = chain.from_iterable(a)", "_____no_output_____" ], [ "acc_max = 0\nbest_columns = []\n\nfor dropped_columns in all_posibilities:\n text_columns = [col for col in df.columns if df[col].dtype not in [np.int64, np.float64]]\n dummy_columns = [col for col in text_columns if col not in dropped_columns]\n \n X_df = df.drop(list(dropped_columns), 1)\n X_df = pd.get_dummies(X_df, columns=dummy_columns, drop_first=True)\n \n X = np.array(X_df).astype(float)\n X = preprocessing.scale(X)\n \n acc = KMeans_process(X, y)\n \n if acc > acc_max:\n acc_max = acc\n best_columns = dropped_columns\n print(acc_max, best_columns, len(X_df.columns))\n\nacc_max, best_columns", "0.6417112299465241 () 2838\n0.6707410236822001 ('ticket',) 1900\n0.7097020626432391 ('fare',) 2837\n0.9663865546218487 ('pclass', 'name') 1531\n0.9992360580595875 ('pclass', 'fare', 'boat') 2808\n" ] ], [ [ "# Manual", "_____no_output_____" ], [ "## Creating Dataset", "_____no_output_____" ] ], [ [ "X = np.array([[1, 2],\n [1.5, 1.8],\n [5, 8 ],\n [8, 8],\n [1, 0.6],\n [9,11]])\n\nplt.scatter(X[:,0], X[:,1], s=150)\nplt.show()\ncolors = 10*[\"g\",\"r\",\"c\",\"b\",\"k\"]", "_____no_output_____" ], [ "class K_Means:\n \n def __init__(self, n_clusters=2, tol=0.001, max_iter=300):\n self.k = n_clusters\n self.tol = tol\n self.max_iter = 
max_iter\n self.steps = []\n \n def fit(self, data):\n self.centroids = {key:data[i] for key, i in enumerate(np.random.randint(0, len(data), size=self.k))}\n \n for i in range(self.max_iter):\n self.classification = defaultdict(list)\n \n for featureset in data:\n distances = {np.linalg.norm(featureset - coord):centroid\n for centroid, coord in self.centroids.items()}\n classification = distances[min(distances)]\n self.classification[classification].append(featureset)\n \n prev_centroids = dict(self.centroids)\n \n for class_, points in self.classification.items():\n self.centroids[class_] = np.average(points, axis=0)\n \n optimized = True\n \n for old, new in zip(prev_centroids.values(), self.centroids.values()):\n if np.sum(abs(new - old) / old * 100) > self.tol:\n optimized = False\n break\n \n self.steps.append((prev_centroids, self.classification))\n \n if optimized:\n break\n \n def predict(self, data):\n distances = {np.linalg.norm(data - centroid):class_ for class_, centroid in self.centroids.items()}\n return distances[min(distances)]\n \n def visualize(self):\n if len(list(self.classification.values())[0][0]) != 2:\n print('Your data have to be 2 Dimensional to be plotted')\n return\n \n fig, ax = plt.subplots()\n \n for centroid in self.centroids.values():\n x, y = centroid\n ax.scatter(x, y, marker='x', color='k', s=200)\n\n for class_, points in self.classification.items():\n color = colors[class_]\n points = np.array(points)\n x, y = points.T\n ax.scatter(x, y, marker='o', color=color, s=150)", "_____no_output_____" ] ], [ [ "## Final Result", "_____no_output_____" ] ], [ [ "clf = K_Means()\nclf.fit(X)\nclf.visualize()\nclf.predict([8,9])", "_____no_output_____" ] ], [ [ "## Evolution of Centroids", "_____no_output_____" ] ], [ [ "def animate(i, steps):\n ax.cla()\n \n centroids, classification = steps[i]\n \n ax.set_xlim(x_min - x_margin, x_max + x_margin)\n ax.set_ylim(y_min - y_margin, y_max + y_margin)\n \n for centroid in centroids.values():\n x, 
y = centroid\n ax.scatter(x, y, marker='x', color='k', s=200)\n\n for class_, points in classification.items():\n color = colors[class_]\n points = np.array(points)\n x, y = points.T\n ax.scatter(x, y, marker='o', color=color, s=150)\n return []\n \n\nfig, ax = plt.subplots()\n\nclf = K_Means()\nclf.fit(X)\nxs = X[:, 0]\nys = X[:, 1]\nx_max, x_min = max(xs), min(xs)\nx_margin = 0.1 * x_max\ny_max, y_min = max(ys), min(ys)\ny_margin = 0.1 * y_max\n\nsteps = clf.steps\n\nanim = animation.FuncAnimation(fig, animate, frames=len(steps), interval=750, blit=True, fargs=(steps,))\n\nplt.close()\n\nHTML(anim.to_html5_video())", "_____no_output_____" ] ], [ [ "# Compare Manual with Sci Kit Learn", "_____no_output_____" ] ], [ [ "df = pd.read_excel('../data/titanic.xls')\ndf = df.fillna(0)", "_____no_output_____" ], [ "dropped_columns = ['name', 'survived', 'body', 'ticket', 'home.dest', 'cabin']\n\ntext_columns = [col for col in df.columns if df[col].dtype not in [np.int64, np.float64]]\ndummy_columns = [col for col in text_columns if col not in dropped_columns]", "_____no_output_____" ], [ "X_df = df.drop(dropped_columns, 1)\nX_df = pd.get_dummies(X_df, columns=dummy_columns, drop_first=True)\n\nX = np.array(X_df).astype(float)\nX = preprocessing.scale(X)\n\ny = np.array(df['survived']).reshape(-1, 1)", "_____no_output_____" ] ], [ [ "## Comparing Speed", "_____no_output_____" ], [ "### Sci-Kit Learn", "_____no_output_____" ] ], [ [ "%timeit KMeans_process(X, y)", "446 ms ± 34.2 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)\n" ] ], [ [ "### Manual ", "_____no_output_____" ] ], [ [ "%timeit KMeans_process(X, y, call=K_Means )", "95.3 ms ± 9.31 ms per loop (mean ± std. dev. 
of 7 runs, 10 loops each)\n" ] ], [ [ "## Comparing Accuracy", "_____no_output_____" ] ], [ [ "KMeans_process(X, y)", "_____no_output_____" ], [ "KMeans_process(X, y, call=K_Means)", "_____no_output_____" ], [ "acc_sklearn = KMeans_process(X, y)\nacc_manual = KMeans_process(X, y, call=K_Means)\nprint(f'One Shot Accuracy Manual KMeans: {round(acc_manual * 100, 2)}%')\nprint(f'One Shot Accuracy SKlearn KMeans: {round(acc_sklearn * 100, 2)}%')\n\nbetter = 0\nn = 20\nfor i in range(n):\n acc_sklearn = KMeans_process(X, y)\n acc_manual = KMeans_process(X, y, call=K_Means)\n better += (acc_manual - acc_sklearn) / acc_sklearn\nprint(f'The manual KMeans was {round(better*100/n, 2)}% better than SKLearn')", "One Shot Accuracy Manual KMeans: 62.57%\nOne Shot Accuracy SKlearn KMeans: 72.73%\nThe manual KMeans was -6.1% better than SKLearn\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
4aa66aaa0e3377fe215fc09b6d389ca159122776
2,315
ipynb
Jupyter Notebook
NotebookExamples/fsharp/Docs/Output in notebooks.ipynb
eerhardt/try
bc3ca4cda1bf1d969a2b30b6e258050ca74845bf
[ "MIT" ]
1
2020-11-09T03:08:32.000Z
2020-11-09T03:08:32.000Z
NotebookExamples/fsharp/Docs/Output in notebooks.ipynb
hassoon1986/try
7f9a36899330659108e9779f312b77f6d56da416
[ "MIT" ]
null
null
null
NotebookExamples/fsharp/Docs/Output in notebooks.ipynb
hassoon1986/try
7f9a36899330659108e9779f312b77f6d56da416
[ "MIT" ]
1
2021-09-06T03:58:12.000Z
2021-09-06T03:58:12.000Z
19.291667
198
0.504536
[ [ [ "# How do present output in a Notebook", "_____no_output_____" ], [ "Expressions will evaluate and print the result.", "_____no_output_____" ] ], [ [ "1 + 3", "_____no_output_____" ] ], [ [ "Note the ```Out[]``` label, that will appear only in the case a cell evaluates producing are turn value. Even with statements if the last line produces a value it will be labelled as ``Out[]``", "_____no_output_____" ] ], [ [ "let r = new System.Random();\nr.Next(0,10)", "_____no_output_____" ] ], [ [ "To display output use the ```display``` api, that uses advanced formatting to print to screen", "_____no_output_____" ] ], [ [ "display(\"hello world\");", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]