Columns: repo_name (string, length 6-130), hexsha (list), file_path (list), code (list), apis (list)
vencia/gym-gomoku
[ "521bb2872d6a201ac5d1ee0881bcd4e996897d14" ]
[ "gym_gomoku/envs/gomoku.py" ]
[ "import numpy as np\nimport gym\nfrom gym import spaces\nfrom gym import error\nfrom gym.utils import seeding\nfrom six import StringIO\nimport sys, os\nimport six\n\nfrom gym_gomoku.envs.util import gomoku_util\nfrom gym_gomoku.envs.util import make_random_policy\nfrom gym_gomoku.envs.util import make_beginner_policy\nfrom gym_gomoku.envs.util import make_medium_policy\nfrom gym_gomoku.envs.util import make_expert_policy\n\n# Rules from Wikipedia: Gomoku is an abstract strategy board game, Gobang or Five in a Row, it is traditionally played with Go pieces (black and white stones) on a go board with 19x19 or (15x15) \n# The winner is the first player to get an unbroken row of five stones horizontally, vertically, or diagonally. (so-calle five-in-a row)\n# Black plays first if white did not win in the previous game, and players alternate in placing a stone of their color on an empty intersection.\n\nclass GomokuState(object):\n '''\n Similar to Go game, Gomoku state consists of a current player and a board.\n Actions are exposed as integers in [0, num_actions), which is to place stone on empty intersection\n '''\n def __init__(self, board, color):\n '''\n Args:\n board: current board\n color: color of current player\n '''\n assert color in ['black', 'white'], 'Invalid player color'\n self.board, self.color = board, color\n \n def act(self, action):\n '''\n Executes an action for the current player\n \n Returns:\n a new GomokuState with the new board and the player switched\n '''\n return GomokuState(self.board.play(action, self.color), gomoku_util.other_color(self.color))\n \n def __repr__(self):\n '''stream of board shape output'''\n # To Do: Output shape * * * o o\n return 'To play: {}\\n{}'.format(six.u(self.color), self.board.__repr__())\n\n# Sampling without replacement Wrapper \n# sample() method will only sample from valid spaces\nclass DiscreteWrapper(spaces.Discrete):\n def __init__(self, n):\n self.n = n\n self.valid_spaces = list(range(n))\n \n def sample(self):\n '''Only sample from the remaining valid spaces\n '''\n if len(self.valid_spaces) == 0:\n print (\"Space is empty\")\n return None\n np_random, _ = seeding.np_random()\n randint = np_random.randint(len(self.valid_spaces))\n return self.valid_spaces[randint]\n \n def remove(self, s):\n '''Remove space s from the valid spaces\n '''\n if s is None:\n return\n if s in self.valid_spaces:\n self.valid_spaces.remove(s)\n else:\n print (\"space %d is not in valid spaces\" % s)\n\n\n### Environment\nclass GomokuEnv(gym.Env):\n '''\n GomokuEnv environment. Play against a fixed opponent.\n '''\n metadata = {\"render.modes\": [\"human\", \"ansi\"]}\n \n def __init__(self, player_color, opponent, board_size):\n \"\"\"\n Args:\n player_color: Stone color for the agent. Either 'black' or 'white'\n opponent: Name of the opponent policy, e.g. 
random, beginner, medium, expert\n board_size: board_size of the board to use\n \"\"\"\n self.board_size = board_size\n self.player_color = player_color\n \n self._seed()\n \n # opponent\n self.opponent_policy = None\n self.opponent = opponent\n \n # Observation space on board\n shape = (self.board_size, self.board_size) # board_size * board_size\n self.observation_space = spaces.Box(np.zeros(shape), np.ones(shape))\n \n # One action for each board position\n self.action_space = DiscreteWrapper(self.board_size**2)\n \n # Keep track of the moves\n self.moves = []\n \n # Empty State\n self.state = None\n \n # reset the board during initialization\n self._reset()\n \n def _seed(self, seed=None):\n self.np_random, seed1 = seeding.np_random(seed)\n # Derive a random seed.\n seed2 = seeding.hash_seed(seed1 + 1) % 2**32\n return [seed1, seed2]\n \n def _reset(self):\n self.state = GomokuState(Board(self.board_size), gomoku_util.BLACK) # Black Plays First\n self._reset_opponent(self.state.board) # (re-initialize) the opponent,\n self.moves = []\n \n # Let the opponent play if it's not the agent's turn, there is no resign in Gomoku\n if self.state.color != self.player_color:\n self.state, _ = self._exec_opponent_play(self.state, None, None)\n opponent_action_coord = self.state.board.last_coord\n self.moves.append(opponent_action_coord)\n \n # We should be back to the agent color\n assert self.state.color == self.player_color\n \n # reset action_space\n self.action_space = DiscreteWrapper(self.board_size**2)\n \n self.done = self.state.board.is_terminal()\n return self.state.board.encode()\n \n def _close(self):\n self.opponent_policy = None\n self.state = None\n \n def _render(self, mode=\"human\", close=False):\n if close:\n return\n outfile = StringIO() if mode == 'ansi' else sys.stdout\n outfile.write(repr(self.state) + '\\n')\n return outfile\n \n def _step(self, action):\n '''\n Args: \n action: int\n Return: \n observation: board encoding, \n reward: reward of the game, \n done: boolean, \n info: state dict\n Raise:\n Illegal Move action, basically the position on board is not empty\n '''\n assert self.state.color == self.player_color # it's the player's turn\n \n # If already terminal, then don't do anything\n if self.done:\n return self.state.board.encode(), 0., True, {'state': self.state}\n \n # Player play\n prev_state = self.state\n self.state = self.state.act(action)\n self.moves.append(self.state.board.last_coord)\n self.action_space.remove(action) # remove current action from action_space\n \n # Opponent play\n if not self.state.board.is_terminal():\n self.state, opponent_action = self._exec_opponent_play(self.state, prev_state, action)\n self.moves.append(self.state.board.last_coord)\n self.action_space.remove(opponent_action) # remove opponent action from action_space\n # After opponent play, we should be back to the original color\n assert self.state.color == self.player_color\n \n # Reward: if nonterminal, there is no 5 in a row, then the reward is 0\n if not self.state.board.is_terminal():\n self.done = False\n return self.state.board.encode(), 0., False, {'state': self.state}\n \n # We're in a terminal state. 
Reward is 1 if won, -1 if lost\n assert self.state.board.is_terminal(), 'The game is terminal'\n self.done = True\n \n # Check Fianl wins\n exist, win_color = gomoku_util.check_five_in_row(self.state.board.board_state) # 'empty', 'black', 'white'\n reward = 0.\n if win_color == \"empty\": # draw\n reward = 0.\n else:\n player_wins = (self.player_color == win_color) # check if player_color is the win_color\n reward = 1. if player_wins else -1.\n return self.state.board.encode(), reward, True, {'state': self.state}\n \n def _exec_opponent_play(self, curr_state, prev_state, prev_action):\n '''There is no resign in gomoku'''\n assert curr_state.color != self.player_color\n opponent_action = self.opponent_policy(curr_state, prev_state, prev_action)\n return curr_state.act(opponent_action), opponent_action\n \n @property\n def _state(self):\n return self.state\n \n @property\n def _moves(self):\n return self.moves\n \n def _reset_opponent(self, board):\n if self.opponent == 'random':\n self.opponent_policy = make_random_policy(self.np_random)\n elif self.opponent == 'beginner':\n self.opponent_policy = make_beginner_policy(self.np_random)\n elif self.opponent == 'medium':\n self.opponent_policy = make_medium_policy(self.np_random)\n elif self.opponent == 'expert':\n self.opponent_policy = make_expert_policy(self.np_random)\n else:\n raise error.Error('Unrecognized opponent policy {}'.format(self.opponent))\n\nclass Board(object):\n '''\n Basic Implementation of a Go Board, natural action are int [0,board_size**2)\n '''\n \n def __init__(self, board_size):\n self.size = board_size\n self.board_state = [[gomoku_util.color_dict['empty']] * board_size for i in range(board_size)] # initialize board states to empty\n self.move = 0 # how many move has been made\n self.last_coord = (-1,-1) # last action coord\n self.last_action = None # last action made\n \n def coord_to_action(self, i, j):\n ''' convert coordinate i, j to action a in [0, board_size**2)\n '''\n a = i * self.size + j # action index\n return a\n \n def action_to_coord(self, a):\n coord = (a // self.size, a % self.size)\n return coord\n \n def get_legal_move(self):\n ''' Get all the next legal move, namely empty space that you can place your 'color' stone\n Return: Coordinate of all the empty space, [(x1, y1), (x2, y2), ...]\n '''\n legal_move = []\n for i in range(self.size):\n for j in range(self.size):\n if (self.board_state[i][j] == 0):\n legal_move.append((i, j))\n return legal_move\n \n def get_legal_action(self):\n ''' Get all the next legal action, namely empty space that you can place your 'color' stone\n Return: Coordinate of all the empty space, [(x1, y1), (x2, y2), ...]\n '''\n legal_action = []\n for i in range(self.size):\n for j in range(self.size):\n if (self.board_state[i][j] == 0):\n legal_action.append(self.coord_to_action(i, j))\n return legal_action\n \n def copy(self, board_state):\n '''update board_state of current board values from input 2D list\n '''\n input_size_x = len(board_state)\n input_size_y = len(board_state[0])\n assert input_size_x == input_size_y, 'input board_state two axises size mismatch'\n assert len(self.board_state) == input_size_x, 'input board_state size mismatch'\n for i in range(self.size):\n for j in range(self.size):\n self.board_state[i][j] = board_state[i][j]\n \n def play(self, action, color):\n '''\n Args: input action, current player color\n Return: new copy of board object\n '''\n b = Board(self.size)\n b.copy(self.board_state) # create a board copy of current board_state\n b.move = 
self.move\n \n coord = self.action_to_coord(action)\n # check if it's legal move\n if (b.board_state[coord[0]][coord[1]] != 0): # the action coordinate is not empty\n raise error.Error(\"Action is illegal, position [%d, %d] on board is not empty\" % ((coord[0]+1),(coord[1]+1)))\n \n b.board_state[coord[0]][coord[1]] = gomoku_util.color_dict[color]\n b.move += 1 # move counter add 1\n b.last_coord = coord # save last coordinate\n b.last_action = action\n return b\n \n def is_terminal(self):\n exist, color = gomoku_util.check_five_in_row(self.board_state)\n is_full = gomoku_util.check_board_full(self.board_state)\n if (is_full): # if the board if full of stones and no extra empty spaces, game is finished\n return True\n else:\n return exist\n \n def __repr__(self):\n ''' representation of the board class\n print out board_state\n '''\n out = \"\"\n size = len(self.board_state)\n \n letters = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')[:size]\n numbers = list(range(1, 100))[:size]\n \n label_move = \"Move: \" + str(self.move) + \"\\n\"\n label_letters = \" \" + \" \".join(letters) + \"\\n\"\n label_boundry = \" \" + \"+-\" + \"\".join([\"-\"] * (2 * size)) + \"+\" + \"\\n\"\n \n # construct the board output\n out += (label_move + label_letters + label_boundry)\n \n for i in range(size-1,-1,-1):\n line = \"\"\n line += (str(\"%2d\" % (i+1)) + \" |\" + \" \")\n for j in range(size):\n # check if it's the last move\n line += gomoku_util.color_shape[self.board_state[i][j]]\n if (i,j) == self.last_coord:\n line += \")\"\n else:\n line += \" \"\n line += (\"|\" + \"\\n\")\n out += line\n out += (label_boundry + label_letters)\n return out\n \n def encode(self):\n '''Return: np array\n np.array(board_size, board_size): state observation of the board\n '''\n img = np.array(self.board_state) # shape [board_size, board_size]\n return img\n" ]
[ [ "numpy.array", "numpy.ones", "numpy.zeros" ] ]
evvanErb/facialRecognitionCommandLineProgram
[ "2a4404626795bf835883bda89bc5adc106f794ac" ]
[ "main.py" ]
[ "#Python 3.x\n\nimport face_recognition\nimport cv2\nimport numpy as np\nimport os\nimport sys\n\nfrom settings import *\nfrom facialDetection import haarDetectFaceLocations, hogDetectFaceLocations\nfrom utilityFunctions import setupDatabase, detectAndRecognizeFacesInImage\n\ndef addPhoto(fileName, personName):\n \"\"\"\n Load a supplied photo and add detected facial encoding to the database\n \"\"\"\n\n #Check if image is a jpg\n if (fileName[-4:] != \".jpg\"):\n print(\"\\n[!] File extenstion must be .jpg!\\n\")\n return\n\n #Check image exists\n if (not os.path.isfile(fileName)):\n print(\"\\n[!] File does not exist!\\n\")\n return\n\n #Check no illegal characters in file name\n for c in ILLEGAL_FILE_NAMES:\n if (c in personName):\n print(\"\\n[!] Provided name contains an illegal argument\\n\")\n return\n\n #Load image\n image = face_recognition.load_image_file(fileName)\n\n #Use the name in the filename as the identity key\n identity = os.path.splitext(os.path.basename(fileName))[0]\n\n #Get the face location\n locationsHog = hogDetectFaceLocations(image)\n\n locationsHaar = haarDetectFaceLocations(image)\n\n #Get the face encoding\n encodingsHaar = face_recognition.face_encodings(image, locationsHaar)\n encodingsHog = face_recognition.face_encodings(image, locationsHog)\n\n #check if exactly one face is in the photo\n if ((len(encodingsHaar) == 0) or (len(encodingsHog) == 0)):\n print(\"\\n[!] No face detected in the provided photo\\n\")\n return\n\n elif ((len(encodingsHaar) > 1) or (len(encodingsHog) > 1)):\n print(\"\\n[!] More than one face detected in the provided photo\\n\")\n return\n\n #Set path to respective dataset\n directoryToAddTo = DATABASE_PATH + personName\n\n #Look for directory\n exists = False\n for subdir, dirs, files in os.walk(DATABASE_PATH):\n if (subdir == directoryToAddTo):\n exists = True\n\n #If directory doesnt exist, make it\n if (not exists):\n os.mkdir(directoryToAddTo)\n\n #Save data to file\n np.savetxt((directoryToAddTo + \"/\" + identity + \"Haar.txt\"),\n encodingsHaar[0])\n np.savetxt((directoryToAddTo + \"/\" + identity + \"Hog.txt\"),\n encodingsHog[0])\n\n print(\"\\n[*] Face successfully added!\\n\")\n\ndef runScanPhotoFaceRecognition(fileName, useHOG=False):\n \"\"\"\n Manages facial recogntion on photos\n \"\"\"\n #Check if image is a jpg\n if (fileName[-4:] != \".jpg\"):\n print(\"\\n[!] File extenstion must be .jpg!\\n\")\n return\n\n elif (not os.path.isfile(fileName)):\n print(\"\\n[!] File does not exist!\\n\")\n return\n\n #Setup database\n database = setupDatabase()\n\n #Load image\n image = face_recognition.load_image_file(fileName)\n\n #Run facial detection and recognition on image\n detectAndRecognizeFacesInImage(image,\n database, useHOG, True)\n\n #Convert image from BGR to RGB and display the resulting image\n image = image[:, :, ::-1]\n cv2.imshow(fileName, image)\n\n print(\"\\n[*] Press Q to quit\\n\")\n\n #Hit 'q' on the keyboard to quit!\n cv2.waitKey(0)\n\ndef runFaceRecognition(useHOG=False):\n \"\"\"\n Manages live facial recognition\n \"\"\"\n #Open a handler for the camera\n video_capture = cv2.VideoCapture(CAMERA_DEVICE_ID)\n\n #Setup database\n database = setupDatabase()\n\n skipFrame = 0\n\n while video_capture.isOpened():\n #Skip every 2 frames to increase frame rate\n if (skipFrame < 2):\n skipFrame += 1\n continue\n else:\n skipFrame = 0\n\n #Read frame from camera and check that it went ok\n ok, frame = video_capture.read()\n if not ok:\n print(\"\\n[!] Error reading frame from camera. 
\", end=\"\")\n print(\"Video capture stopped.\\n\")\n break\n\n #Run facial detection and recognition on image\n detectAndRecognizeFacesInImage(frame,\n database, useHOG)\n\n #Display the resulting image\n cv2.imshow('Video', frame)\n\n #Hit 'q' on the keyboard to quit!\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n\n #Release handle to the webcam\n video_capture.release()\n cv2.destroyAllWindows()\n\ndef main():\n #Check if there is an argument\n if (len(sys.argv) < 2):\n print(\"\\n[!] No arguments!\\n\")\n return\n\n #Get user argument\n argument = sys.argv[1]\n\n if (argument == \"addface\"):\n #If user didnt supply a photo path\n if (len(sys.argv) < 4):\n print(\"\\n[!] Not enough arguments!\\n\")\n return\n\n #Otherwise add photo to database\n photoPath = sys.argv[2]\n name = sys.argv[3]\n addPhoto(photoPath, name)\n\n elif (argument == \"run\"):\n print(\"\\n[*] Press Q to quit\\n\")\n runFaceRecognition()\n\n elif (argument == \"runhog\"):\n print(\"\\n[*] Press Q to quit\\n\")\n runFaceRecognition(True)\n\n elif (argument == \"scanphoto\"):\n #If user didnt supply a photo path\n if (len(sys.argv) < 3):\n print(\"\\n[!] No photo path!\\n\")\n return\n\n #Otherwise add photo to database\n photoPath = sys.argv[2]\n runScanPhotoFaceRecognition(photoPath)\n\n elif (argument == \"scanphotohog\"):\n #If user didnt supply a photo path\n if (len(sys.argv) < 3):\n print(\"\\n[!] No photo path!\\n\")\n return\n\n #Otherwise add photo to database\n photoPath = sys.argv[2]\n runScanPhotoFaceRecognition(photoPath, True)\n\n elif (argument == \"help\"):\n print(\"\\nArguments for Live Facial Recognition Software include:\\n\")\n print(\"1. python3 main.py addface image_path person_name:\", end=\"\")\n print(\" adds a face encoding to the database\")\n print(\"2. python3 main.py run : runs webcam face recognition\")\n print(\"3. python3 main.py help : prints this menu\")\n print(\"4. python3 main.py scanphoto image_path : \", end=\"\")\n print(\"scans a photo for face recognition\")\n print(\"5. python3 main.py runhog : runs webcam face \", end=\"\")\n print(\"recognition using HOG face detection\")\n print(\"6. python3 main.py scanphotohog image_path : \", end=\"\")\n print(\"scans a photo for face recognition using HOG face detection\\n\")\n\n else:\n print(\"\\n[!] Unknown argument!\\n\")\n\n\nmain()\n" ]
[ [ "numpy.savetxt" ] ]
goodlux/tutorials
[ "bbd7c7b83413164baf7a86b03b56c57a7ec3e75b" ]
[ "advanced_source/numpy_extensions_tutorial.py" ]
[ "# -*- coding: utf-8 -*-\n\"\"\"\nCreating extensions using numpy and scipy\n=========================================\n**Author**: `Adam Paszke <https://github.com/apaszke>`_\n\n**Updated by**: `Adam Dziedzic` [https://github.com/adam-dziedzic](https://github.com/adam-dziedzic)\n\nIn this tutorial, we shall go through two tasks:\n\n1. Create a neural network layer with no parameters.\n\n - This calls into **numpy** as part of its implementation\n\n2. Create a neural network layer that has learnable weights\n\n - This calls into **SciPy** as part of its implementation\n\"\"\"\n\nimport torch\nfrom torch.autograd import Function\n\n###############################################################\n# Parameter-less example\n# ----------------------\n#\n# This layer doesn’t particularly do anything useful or mathematically\n# correct.\n#\n# It is aptly named BadFFTFunction\n#\n# **Layer Implementation**\n\nfrom numpy.fft import rfft2, irfft2\n\n\nclass BadFFTFunction(Function):\n\n def forward(self, input):\n numpy_input = input.detach().numpy()\n result = abs(rfft2(numpy_input))\n return input.new(result)\n\n def backward(self, grad_output):\n numpy_go = grad_output.numpy()\n result = irfft2(numpy_go)\n return grad_output.new(result)\n\n# since this layer does not have any parameters, we can\n# simply declare this as a function, rather than as an nn.Module class\n\n\ndef incorrect_fft(input):\n return BadFFTFunction()(input)\n\n###############################################################\n# **Example usage of the created layer:**\n\ninput = torch.randn(8, 8, requires_grad=True)\nresult = incorrect_fft(input)\nprint(result)\nresult.backward(torch.randn(result.size()))\nprint(input)\n\n###############################################################\n# Parametrized example\n# --------------------\n#\n# In deep learning literature, this layer is confusingly referred\n# to as convolution while the actual operation is cross-correlation\n# (the only difference is that filter is flipped for convolution,\n# which is not the case for cross-correlation).\n#\n# Implementation of a layer with learnable weights, where cross-correlation\n# has a filter (kernel) that represents weights.\n#\n# The backward pass computes the gradient wrt the input and the gradient wrt the filter.\n\nfrom numpy import flip\nimport numpy as np\nfrom scipy.signal import convolve2d, correlate2d\nfrom torch.nn.modules.module import Module\nfrom torch.nn.parameter import Parameter\n\n\nclass ScipyConv2dFunction(Function):\n @staticmethod\n def forward(ctx, input, filter, bias):\n # detach so we can cast to NumPy\n input, filter, bias = input.detach(), filter.detach(), bias.detach()\n result = correlate2d(input.numpy(), filter.numpy(), mode='valid')\n result += bias.numpy()\n ctx.save_for_backward(input, filter, bias)\n return torch.from_numpy(result)\n\n @staticmethod\n def backward(ctx, grad_output):\n grad_output = grad_output.detach()\n input, filter, bias = ctx.saved_tensors\n grad_output = grad_output.numpy()\n grad_bias = np.sum(grad_output, keepdims=True)\n grad_input = convolve2d(grad_output, filter.numpy(), mode='full')\n # the previous line can be expressed equivalently as:\n # grad_input = correlate2d(grad_output, flip(flip(filter.numpy(), axis=0), axis=1), mode='full')\n grad_filter = correlate2d(input.numpy(), grad_output, mode='valid')\n return torch.from_numpy(grad_input), torch.from_numpy(grad_filter).to(torch.float), torch.from_numpy(grad_bias).to(torch.float)\n\n\nclass ScipyConv2d(Module):\n def __init__(self, 
filter_width, filter_height):\n super(ScipyConv2d, self).__init__()\n self.filter = Parameter(torch.randn(filter_width, filter_height))\n self.bias = Parameter(torch.randn(1, 1))\n\n def forward(self, input):\n return ScipyConv2dFunction.apply(input, self.filter, self.bias)\n\n\n###############################################################\n# **Example usage:**\n\nmodule = ScipyConv2d(3, 3)\nprint(\"Filter and bias: \", list(module.parameters()))\ninput = torch.randn(10, 10, requires_grad=True)\noutput = module(input)\nprint(\"Output from the convolution: \", output)\noutput.backward(torch.randn(8, 8))\nprint(\"Gradient for the input map: \", input.grad)\n\n###############################################################\n# **Check the gradients:**\n\nfrom torch.autograd.gradcheck import gradcheck\n\nmoduleConv = ScipyConv2d(3, 3)\n\ninput = [torch.randn(20, 20, dtype=torch.double, requires_grad=True)]\ntest = gradcheck(moduleConv, input, eps=1e-6, atol=1e-4)\nprint(\"Are the gradients correct: \", test)\n" ]
[ [ "numpy.sum", "numpy.fft.irfft2", "torch.from_numpy", "torch.randn", "numpy.fft.rfft2", "torch.autograd.gradcheck.gradcheck" ] ]
aleatory-science/pyro
[ "4ed1d8a9cbc989f6f63afacc7cd98fae50761247" ]
[ "examples/visual_rama.py" ]
[ "import logging\nimport math\nimport pickle\nimport sys\nimport warnings\nfrom math import pi\nfrom pathlib import Path\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport torch\nfrom numpy import array\n\nimport pyro\nfrom pyro import poutine\nfrom pyro.distributions import (\n Beta,\n Categorical,\n Dirichlet,\n Gamma,\n SineBivariateVonMises,\n SineSkewed,\n Uniform,\n VonMises,\n)\nfrom pyro.infer import config_enumerate, Predictive\nfrom tests.common import tensors_default_to\n\nlogging.getLogger('matplotlib.font_manager').disabled = True\n\nAMINO_ACIDS = ['M', 'N', 'I', 'F', 'E', 'L', 'R', 'D', 'G', 'K', 'Y', 'T', 'H', 'S', 'P', 'A', 'V', 'Q', 'W', 'C']\n\nnp.set_printoptions(threshold=sys.maxsize)\n\nshuffle = {'M': array([11614, 17418, 777, 2375, 12989, 12655, 9033, 15888, 2464,\n 16899, 13633, 4495, 10579, 17166, 11999, 669, 11935, 9256,\n 13616, 10387, 3192, 2128, 10368, 7949, 17615, 9555, 8451,\n 2041, 3699, 495, 3921, 8582, 12909, 13780, 5433, 15187,\n 13731, 6321, 14458, 16139, 1324, 9220, 16091, 10279, 14608,\n 9, 14370, 4422, 8376, 11653, 9784, 10355, 5161, 6731,\n 9812, 9246, 6635, 2724, 12370, 1318, 508, 8267, 4034,\n 16339, 10880, 2926, 3078, 3543, 1232, 15319, 5261, 15279,\n 5384, 17657, 8087, 10310, 9909, 6207, 3675, 8318, 7444,\n 8304, 10148, 12786, 3207, 344, 8314, 824, 6587, 15788,\n 16728, 917, 12050, 7272, 13405, 9819, 10068, 1174, 8528,\n 7104, 11742, 5620, 12691, 16393, 6984, 2399, 17071, 9144,\n 5142, 2331, 14672, 1135, 9651, 6372, 12231, 1624, 4236,\n 12540, 2357, 5822, 9249, 12008, 4113, 13732, 1570, 3020,\n 2784, 9738, 6729, 5737, 14657, 2862, 13223, 4259, 2986,\n 9905, 6913, 15552, 13725, 5277, 1010, 16243, 5514, 5184,\n 2674, 13030, 16730, 5670, 3788, 14938, 12176, 1760, 2511,\n 4264, 6687, 11197, 3466, 3422, 2141, 4640, 1853, 3222,\n 2344, 4861, 15212, 8477, 5217, 12290, 5641, 13893, 9213,\n 10154, 3801, 11368, 11124, 3457, 16025, 10384, 3746, 14682,\n 7310, 9125, 15836, 8588, 5100, 11056, 8640, 7524, 12469,\n 544, 6932, 1035, 7107, 10028, 9981, 16289, 5803, 4775,\n 43, 10571, 3191, 2518, 16587, 13469, 8338, 16012, 8985,\n 7982, 5023, 16429, 12904, 8681, 16991, 2893, 10672, 7349,\n 8555, 6052, 16008, 15474, 9873, 673, 4512, 1683, 7774,\n 7993, 6526, 4441, 6357, 3499, 11850, 3448, 1472, 8854,\n 1736, 6013, 9233, 9177, 874, 17252, 1047, 16532, 9469,\n 17689, 16381, 13623, 4500, 10640, 12022, 7603, 7607, 15177,\n 1863, 10730, 7426, 500, 9844, 5919, 2637, 6937, 14509,\n 5558, 6206, 13327, 2933, 8291, 1061, 6927, 12336, 4464,\n 17161, 4623, 3755, 7656, 2208, 2792, 12074, 558, 8723,\n 12876, 10980, 13891, 59, 11928, 17769, 13277, 14780, 909,\n 8169, 4696, 1521, 10728, 13284, 4172, 17797, 14191, 10325,\n 8900, 17616, 6536, 15120, 8521, 10966, 14589, 10066, 15960,\n 17333, 10947, 15365, 12499, 9374, 6625, 8000, 8359, 14888,\n 7403, 8111, 5223, 5773, 3150, 2082, 399, 1506, 9800,\n 14337, 8505, 13203, 4756, 9822, 11889, 1610, 3677, 10792,\n 3168, 4953, 11659, 219, 5527, 5725, 15927, 6200, 3381,\n 2762, 4656, 1164, 6181, 10442, 15789, 4096, 7823, 17054,\n 9936, 6229, 11493, 14079, 9754, 13359, 5221, 14241, 14225,\n 5295, 9510, 17576, 9539, 216, 1029, 4203, 68, 8619,\n 1679, 317, 13966, 4442, 17120, 17719, 7423, 14649, 11743,\n 10841, 2902, 11630, 13456, 1789, 17528, 3042, 7146, 11687,\n 5032, 3301, 484, 15955, 9333, 7599, 16118, 11560, 10095,\n 453, 5106, 10187, 10480, 14468, 11044, 14257, 11170, 3768,\n 16682, 16003, 7735, 10998, 17162, 13774, 10048, 4013, 15457,\n 8224, 17744, 5761, 15945, 17415, 7709, 16325, 13478, 15445,\n 15678, 11562, 17787, 3811, 
4154, 15042, 6513, 10155, 14021,\n 12763, 5787, 209, 9432, 12536, 2805, 16, 10050, 11015,\n 1316, 1719, 5804, 3152, 8223, 16342, 11179, 1780, 11639,\n 2931, 14059, 5738, 17675, 11177, 6089, 9306, 1752, 10243,\n 15191, 7358, 8824, 3376, 6203, 1486, 9183, 1372, 6421,\n 9753, 5260, 1014, 12205, 3564, 17477, 17424, 13146, 17497,\n 701, 117, 17050, 5616, 15683, 10660, 17560, 14154, 15226,\n 2115, 2987, 5249, 627, 12438, 17629, 13861, 6038, 4968,\n 599, 10390, 17555, 10970, 5256, 2123, 10375, 9327, 5601,\n 13558, 16576, 217, 774, 4510, 10173, 11046, 6184, 11895,\n 15700, 2766, 16510, 9891, 2703, 16646, 7326, 145, 12177,\n 8252, 7793, 6170, 14593, 9339, 2946, 9645, 15557, 3574,\n 5342, 15570, 12807, 16751, 4456, 13694, 1093, 12728, 165,\n 5653, 10529, 5522, 8162, 15596, 1661, 13904, 16491, 15277,\n 12895, 14992, 7460, 12770, 15027, 14116, 4427, 13880, 613,\n 14508, 3332, 17297, 5851, 17150, 13155, 17562, 17031, 16702,\n 260, 2701, 11510, 5668, 13809, 6045, 9703, 12009, 13889,\n 11975, 10702, 7010, 11155, 1810, 10311, 180, 5362, 15854,\n 11789, 3533, 4307, 16236, 8508, 12147, 8616, 4542, 10516,\n 9788, 2798, 14879, 6509, 2496, 1633, 6989, 13404, 4075,\n 5245, 8630, 12558, 12776, 14498, 16206, 13510, 11017, 15380,\n 7778, 8341, 12417, 14477, 15928, 9824, 13884, 12723, 11390,\n 16754, 390, 1490, 8510, 6581, 13916, 17754, 12862, 14101,\n 17501, 15080, 13108, 6235, 12456, 6082, 2498, 8949, 5121,\n 9598, 8105, 13841, 4073, 12907, 16517, 5471, 15829, 3147,\n 6381, 6355, 51, 16651, 3740, 8393, 16018, 8688, 11161,\n 12075, 190, 5845, 12262, 4020, 5565, 5508, 4302, 8884,\n 10114, 3136, 3435, 9992, 10595, 7397, 11111, 15368, 9314,\n 8379, 9160, 10298, 10365, 6319, 3765, 14971, 8812, 9022,\n 6254, 6618, 11887, 2653, 2366, 4035, 1528, 14097, 14648,\n 13989, 14618, 12194, 8957, 9045, 4845, 7242, 1935, 5557,\n 12242, 5257, 4788, 9898, 17756, 10020, 1811, 4094, 11617,\n 12094, 15025, 11822, 4773, 15669, 7826, 16583, 4202, 3985,\n 17857, 1143, 8591, 16649, 2090, 582, 1229, 8707, 11550,\n 869, 15494, 12853, 2458, 12804, 7405, 3773, 6538, 14333,\n 17842, 11553, 11598, 7278, 17126, 13225, 12986, 15508, 103,\n 648, 8433, 2685, 14012, 6982, 3842, 4300, 2636, 17493,\n 8120, 1306, 7424, 8702, 3006, 9511, 12201, 17848, 5447,\n 9904, 16949, 6258, 3722, 5138, 12806, 7794, 16904, 9915,\n 9444, 9575, 17237, 2663, 16581, 4485, 16716, 11794, 13611,\n 1108, 13110, 5227, 822, 7967, 15782, 14541, 10379, 48,\n 704, 15192, 13761, 7660, 2719, 3598, 17221, 1442, 11029,\n 15843, 1483, 9866, 4387, 11134, 8235, 11976, 14060, 10086,\n 411, 6037, 5900, 7040, 14637, 12595, 7385, 6510, 3923,\n 15161, 6807, 6709, 16853, 1001, 7187, 14288, 11968, 3012,\n 2623, 5842, 10696, 14159, 12730, 3547, 3315, 4548, 2790,\n 5798, 12297, 17757, 8881, 3023, 137, 11187, 14017, 16323,\n 1465, 7797, 12202, 16478, 7733, 13569, 3183, 11621, 14181,\n 1330, 9232, 5880, 14409, 1359, 4692, 15270, 12135, 7988,\n 4871, 16799, 12739, 1383, 17810, 5357, 13827, 7084, 8815,\n 4697, 12229, 16544, 1552, 13091, 11327, 12261, 13400, 3636,\n 14266, 14974, 8766, 8375, 3776, 1592, 8541, 894, 2855,\n 5577, 15679, 13986, 5516, 8836, 11824, 10677, 11310, 952,\n 15206, 2947, 7728, 14789, 13162, 17438, 9393, 7926, 16749,\n 17822, 835, 12309, 8632, 12083, 14230, 2079, 5332, 17614,\n 9267, 6315, 15060, 1301, 14601, 921, 3484, 8503, 6761,\n 3057, 7421, 2750, 8265, 8853, 3508, 4798, 15295, 126,\n 2756, 3402, 463, 1427, 11549, 1928, 14473, 3895, 8851,\n 17403, 8925, 14069, 5413, 13303, 17691, 4934, 9792, 16869,\n 7186, 7706, 5473, 6523, 8862, 5192, 13496, 8188, 10762,\n 8371, 7590, 
11367, 7586, 8317, 17542, 6765, 1423, 14271,\n 6476, 9969, 14887, 3365, 1583, 11335, 3098, 12697, 9533,\n 17121, 6360, 8724, 14245, 13289, 10406, 3052, 9742, 11932,\n 6272, 10350, 3213, 12591, 6020, 14142, 4027, 10626, 12051,\n 4911, 9151, 435, 6452, 7241, 7305, 12137, 2493, 2235,\n 12525, 1202, 8683, 17676, 4823, 1879, 40, 15105, 4139,\n 3201, 4825, 4646, 13407, 12323, 1585, 4856, 7386, 7715,\n 13950, 15297, 8907, 1155, 17069, 2999, 11154, 2192, 1729,\n 7234, 6288, 16071, 7798, 3228, 17859, 15795, 10067, 9571,\n 17565, 17344, 9836, 10264, 16570, 2856, 14298, 2713, 10075,\n 3551, 7389, 5171, 2250, 13770, 13449, 13175, 13184, 7518,\n 10839, 568, 16921, 14344, 6953, 5967, 601, 8533, 5955,\n 2625, 5897, 8131, 14914, 14883, 10859, 11627, 122, 15940,\n 17752, 4065, 13870, 6143, 5888, 6242, 2311, 6578, 17480,\n 12611, 10359, 8069, 15478, 838, 9504, 7245, 10210, 12664,\n 6073, 16109, 7376, 16579, 10935, 6430, 13929, 1671, 7703,\n 3974, 6081, 1130, 8721, 11608, 1154, 11790, 4123, 14204,\n 15298, 9881, 12854, 13851, 2717, 2903, 9596, 2383, 13384,\n 15704, 11202, 5636, 12136, 15301, 7069, 8276, 934, 9273,\n 17112, 358, 9096, 17074, 5608, 3102, 1710, 13415, 2992,\n 6688, 13968, 15864, 9027, 3833, 3935, 7247, 6854, 11782,\n 442, 99, 17239, 3013, 4147, 1212, 9695, 408, 3117,\n 6781, 11322, 9277, 7924, 6060, 8319, 11071, 3082, 200,\n 6790, 5588, 3517, 4435, 15369, 12453, 17119, 13346, 3030,\n 1457, 5569, 4955, 12523, 5491, 17594, 1354, 3868, 14038,\n 16755, 11362, 10113, 2552, 4162, 11619, 832, 1792, 3002,\n 370, 17034, 7005, 4734, 17256, 14359, 13658, 4626, 9706,\n 17499, 17440, 5911, 1829, 5824, 7588, 12973, 11868, 15613,\n 3028, 825, 9250, 17323, 2178, 17545, 2733, 5446, 12053,\n 11226, 8929, 6808, 5020, 14254, 5509, 17502, 10416, 7461,\n 16182, 10582, 1443, 8429, 11096, 14728, 4347, 4043, 3450,\n 10327, 5003, 16683, 12516, 6044, 421, 7783, 6672, 17468,\n 8013, 15796, 13644, 6502, 290, 5893, 4770, 11100, 6833,\n 5419, 13566, 6448, 4791, 14697, 3149, 16456, 16314, 496,\n 16737, 2370, 12717, 13994, 3560, 2647, 5515, 691, 9670,\n 15871, 5663, 15713, 622, 1560, 3662, 3248, 4467, 12133,\n 5248, 291, 13946, 3317, 11691, 3247, 1711, 14401, 16163,\n 8873, 3263, 58, 2529, 9713, 14300, 15537, 5526, 5150,\n 8847, 14250, 3761, 11529, 9678, 5204, 13411, 10084, 5067,\n 5771, 14950, 5906, 11255, 5632, 10788, 8080, 14826, 14963,\n 4015, 12296, 4544, 14654, 8228, 4275, 17798, 11456, 16168,\n 14735, 13670, 11681, 15490, 9046, 5033, 10043, 9655, 3355,\n 11389, 11225, 7228, 10198, 11407, 11033, 1314, 11880, 4394,\n 1321, 10202, 16205, 10518, 8601, 12356, 4218, 10026, 12805,\n 6819, 1861, 3799, 511, 7018, 17145, 4310, 10552, 7075,\n 3556, 1602, 16924, 6010, 5424, 127, 5318, 15455, 4039,\n 7506, 9764, 11858, 3846, 8284, 12258, 3611, 7033, 16196,\n 4428, 9968, 15715, 10820, 14717, 8308, 639, 7700, 9700,\n 840, 6474, 14899, 2822, 1939, 15335, 10142, 7813, 15030,\n 6925, 5109, 15485, 10933, 14999, 14917, 5189, 10906, 1997,\n 5045, 4313, 7357, 6047, 17543, 1569, 16652, 334, 13830,\n 10180, 15873, 11609, 15693, 2962, 15996, 10611, 3735, 8917,\n 3787, 12154, 9750, 9433, 16035, 4881, 17152, 8863, 2444,\n 11158, 9771, 5195, 9807, 1783, 536, 10793, 2448, 12308,\n 9391, 11992, 17696, 805, 9386, 14736, 2176, 10378, 4247,\n 1981, 11537, 866, 113, 15059, 13171, 3784, 2677, 9945,\n 16750, 3080, 10346, 11463, 42, 6590, 12451, 7507, 11624,\n 6902, 15079, 2785, 7847, 4609, 12582, 2569, 2922, 17644,\n 15264, 10313, 11411, 7738, 4944, 12403, 16357, 15003, 4915,\n 11234, 7447, 6249, 13349, 10888, 10939, 4714, 6758, 1958,\n 76, 8928, 13608, 
231, 16175, 9847, 4688, 5683, 3821,\n 16566, 10335, 10665, 2878, 16056, 8363, 16463, 13322, 12710,\n 976, 14841, 4176, 4976, 6711, 2005, 7888, 5080, 7061,\n 5101, 14405, 10492, 3068, 17750, 1353, 8288, 9259, 17784,\n 15273, 9023, 8739, 12253, 17733, 17214, 11363, 8194, 17357,\n 4005, 12268, 2472, 2681, 13112, 10590, 10787, 14157, 8858,\n 4469, 12781, 17099, 14638, 8966, 5717, 6046, 16820, 10755,\n 10474, 8904, 16609, 4617, 17213, 15849, 1509, 17789, 6106,\n 972, 11588, 13957, 17134, 5390, 17276, 13352, 6036, 7624,\n 3816, 6716, 16445, 2954, 15311, 15628, 7436, 14668, 11364,\n 5397, 1068, 4893, 10832, 16487, 9635, 11522, 17286, 16382,\n 3645, 10826, 4195, 8130, 17040, 857, 4818, 2887, 15447,\n 9643, 5234, 10504, 16839, 6445, 6901, 16870, 12696, 5472,\n 14612, 3127, 2574, 16201, 11336, 9516, 9285, 1338, 4143,\n 12745, 16361, 10978, 8755, 1773, 5111, 5005, 3169, 12374,\n 15783, 15377, 8806, 5365, 10082, 10306, 6830, 7467, 5859,\n 15232, 8939, 16960, 12899, 6780, 5807, 9077, 5938, 5646,\n 11402, 783, 7884, 6053, 10856, 7837, 5875, 1368, 10957,\n 15597, 12575, 12096, 15366, 17007, 5072, 14358, 4221, 7846,\n 15727, 14766, 14277, 2764, 5398, 6773, 15073, 8745, 10103,\n 14993, 9982, 937, 7318, 6858, 12873, 14677, 9234, 4325,\n 5726, 16589, 9414, 11523, 14651, 10618, 2198, 5062, 7171,\n 9956, 13669, 7250, 4896, 14855, 15826, 12514, 10711, 1923,\n 5034, 4682, 2316, 14100, 5205, 16868, 3625, 5837, 3654,\n 3725, 3000, 4007, 4384, 817, 6336, 948, 14011, 2033,\n 10318, 4699, 5454, 9629, 9601, 6417, 1627, 16578, 13855,\n 12317, 2442, 12930, 8569, 5560, 1304, 1632, 16976, 17441,\n 13409, 5029, 1619, 14530, 3827, 12905, 14774, 12833, 17804,\n 7533, 1259, 8796, 16525, 11196, 3786, 11996, 6696, 17476,\n 11961, 3720, 6812, 812, 2753, 9383, 2026, 12276, 7180,\n 999, 17235, 9688, 8888, 14669, 12884, 8898, 3324, 13024,\n 10652, 1131, 6462, 10008, 8562, 14564, 10766, 7677, 452,\n 10120, 12071, 2727, 3492, 6030, 4747, 15256, 16879, 3856,\n 8979, 14738, 7288, 8241, 7746, 1144, 13548, 15972, 13847,\n 15336, 11856, 7219, 7545, 100, 6320, 10407, 16747, 7604,\n 477, 17607, 2052, 4560, 2736, 16027, 12448, 9029, 2646,\n 15061, 4111, 13020, 16691, 1778, 2118, 10260, 9924, 13234,\n 13170, 543, 6432, 340, 15987, 7582, 3180, 14491, 9123,\n 13672, 6205, 15544, 6492, 10003, 17255, 12285, 3514, 5093,\n 7509, 13076, 13319, 3832, 7062, 16234, 15969, 12182, 5439,\n 16633, 12916, 1745, 2096, 15812, 8835, 166, 15896, 10062,\n 13298, 11773, 5885, 11558, 8275, 10925, 14259, 7539, 10865,\n 11425, 9666, 3708, 17561, 11135, 1124, 3559, 12627, 610,\n 6588, 605, 6051, 11696, 10057, 3894, 749, 12927, 1401,\n 5731, 12824, 7834, 17505, 17083, 9364, 3758, 13286, 16923,\n 13845, 14499, 14848, 16209, 660, 11169, 10814, 6742, 1415,\n 8727, 2432, 3762, 15640, 90, 3988, 9281, 16229, 16301,\n 7039, 470, 3252, 7891, 11748, 8751, 8901, 8125, 13429,\n 12590, 15163, 4712, 5960, 17141, 15999, 15933, 13791, 7415,\n 11137, 6433, 4928, 8049, 7543, 1638, 14568, 12560, 85,\n 16790, 679, 1600, 14681, 6699, 15740, 3441, 11579, 4476,\n 15248, 3633, 5664, 8718, 354, 17714, 5561, 11636, 9218,\n 13198, 15280, 13253, 2330, 13475, 6112, 14465, 6743, 17143,\n 1265, 10569, 2351, 4701, 8331, 1182, 308, 10680, 12103,\n 14764, 7118, 11473, 15985, 17217, 13733, 12614, 1988, 7081,\n 6762, 11128, 11472, 16895, 8705, 11027, 2315, 5576, 9129,\n 4880, 8624, 13925, 9715, 1126, 12736, 14336, 14301, 5652,\n 12212, 3603, 3977, 3909, 10135, 941, 2613, 9189, 4815,\n 6849, 4924, 17449, 9923, 4858, 9307, 14652, 11777, 1044,\n 3257, 11307, 9396, 9876, 7994, 15091, 13306, 12799, 6822,\n 
6478, 15183, 16511, 10443, 10091, 6202, 2411, 7897, 2018,\n 16625, 11847, 10837, 6949, 9401, 6671, 11983, 695, 7704,\n 15203, 14064, 10512, 9950, 1598, 16843, 16992, 13304, 13583,\n 13495, 12213, 4289, 2875, 15716, 5058, 10900, 535, 11583,\n 14121, 14449, 17599, 3504, 5475, 5705, 11299, 5684, 5753,\n 13300, 7112, 15125, 11959, 12029, 7771, 3913, 2853, 798,\n 8469, 1929, 7392, 17106, 3333, 1665, 11737, 11342, 15673,\n 15971, 14575, 4480, 2382, 15718, 13397, 9054, 6900, 781,\n 3616, 590, 2840, 15604, 10257, 4438, 15786, 2478, 16130,\n 6909, 12483, 10471, 12637, 1904, 4760, 591, 5630, 7383,\n 6069, 7606, 6555, 12442, 4628, 7432, 15747, 7193, 7431,\n 14433, 12426, 14954, 16872, 2584, 9387, 10898, 5242, 7618,\n 14151, 8404, 5775, 17360, 9592, 17303, 16485, 3882, 11801,\n 1557, 8768, 5833, 14832, 8380, 6608, 11692, 7073, 17445,\n 6295, 11952, 620, 14732, 9589, 10553, 819, 7959, 13310,\n 11854, 2193, 5939, 13022, 9830, 9073, 10159, 7289, 1392,\n 13294, 6795, 17751, 8869, 56, 82, 1797, 17231, 17664,\n 6645, 1200, 10873, 9085, 6361, 6851, 12414, 14556, 3666,\n 13552, 1361, 8444, 9768, 427, 15242, 13685, 15923, 4898,\n 741, 4361, 6057, 7549, 13132, 14813, 12150, 14998, 16251,\n 13208, 12245, 9573, 6928, 780, 17118, 3262, 16974, 9562,\n 13470, 10212, 5933, 14371, 863, 14624, 14501, 14437, 4265,\n 11780, 9646, 6751, 17304, 16120, 17392, 3927, 0, 11719,\n 6626, 12443, 10015, 2737, 4070, 14400, 12327, 15631, 15565,\n 10049, 15790, 3723, 15160, 13116, 16468, 13035, 6480, 17372,\n 10943, 15543, 12901, 6270, 4365, 17033, 2273, 12609, 5173,\n 6072, 11978, 3567, 10149, 9769, 12663, 12602, 4654, 15939,\n 13905, 15626, 5280, 11855, 17569, 12013, 7791, 13652, 8345,\n 13370, 6399, 1433, 880, 379, 6787, 7870, 16471, 11422,\n 626, 6378, 237, 15367, 6335, 13427, 2317, 8774, 14897,\n 12768, 3120, 7514, 13202, 11156, 3094, 4288, 2470, 17489,\n 9228, 10473, 2100, 5479, 4400, 625, 2129, 12467, 15082,\n 10071, 16146, 645, 5068, 8954, 189, 3501, 15564, 15376,\n 5839, 12113, 7551, 13743, 10042, 6368, 5953, 16626, 3440,\n 346, 1149, 15816, 5524, 6591, 12039, 17026, 8918, 15007,\n 17585, 17471, 6586, 13129, 4863, 324, 1717, 12790, 4647,\n 2372, 12672, 4173, 4174, 5657, 7572, 4583, 3407, 8517,\n 4526, 3571, 16509, 10377, 4632, 5506, 8895, 12247, 8370,\n 16115, 8776, 15627, 3840, 8356, 6543, 8828, 6683, 16972,\n 17075, 7443, 712, 119, 5007, 17581, 409, 10415, 12818,\n 6592, 11035, 6772, 845, 5931, 11763, 10709, 13240, 7861,\n 10642, 13922, 7756, 8249, 10338, 3210, 10404, 858, 12617,\n 12919, 10115, 6420, 3434, 1641, 3922, 10121, 9880, 2923,\n 5238, 4322, 176, 11531, 138, 12359, 7630, 6488, 14132,\n 7864, 6546, 2630, 6297, 6718, 8450, 15184, 4049, 1626,\n 15083, 374, 7139, 10824, 5814, 13835, 11263, 13079, 5729,\n 5395, 7226, 3826, 1378, 3938, 10698, 14488, 3276, 11018,\n 621, 16374, 15178, 14986, 5300, 15383, 17785, 1704, 11209,\n 10746, 12489, 3390, 1541, 6539, 14430, 5496, 17763, 9253,\n 2938, 4844, 11230, 15039, 2416, 15602, 4277, 4904, 992,\n 11652, 14440, 16791, 17690, 9704, 5122, 15651, 13486, 551,\n 2214, 8313, 16995, 6769, 11832, 6692, 15724, 4514, 8020,\n 17807, 6643, 5309, 4371, 16402, 8937, 4180, 9744, 14122,\n 11753, 4918, 4118, 9541, 3824, 16962, 13149, 15966, 1744,\n 10178, 6481, 7065, 10118, 11183, 16765, 3810, 4090, 3343,\n 10032, 14315, 6986, 2490, 3040, 16714, 16452, 7747, 3834,\n 10190, 13582, 15476, 11586, 16503, 2429, 4466, 15408, 13589,\n 13515, 7907, 16877, 17278, 10764, 2187, 8602, 8994, 2260,\n 4703, 8691, 2249, 6084, 11693, 9863, 12321, 9019, 15101,\n 8144, 609, 2576, 8600, 12816, 14231, 17336, 7675, 
7306,\n 14910, 13101, 2953, 4754, 7498, 13176, 7899, 13453, 7976,\n 7644, 529, 12362, 16420, 17328, 12981, 8501, 1311, 7765,\n 16319, 8015, 4635, 14877, 16941, 8086, 3124, 12395, 13113,\n 16818, 10030, 1140, 11057, 10783, 15744, 898, 12972, 5247,\n 4644, 11477, 1726, 15219, 720, 5700, 12749, 9041, 14229,\n 2469, 4224, 1596, 16880, 1112, 14421, 2045, 5704, 12812,\n 12574, 6458, 13144, 15903, 3109, 12490, 8654, 12908, 17136,\n 10285, 13921, 4538, 702, 1479, 133, 2604, 16318, 3904,\n 11783, 3449, 12306, 5863, 7273, 6158, 12731, 14485, 12360,\n 4336, 9141, 12891, 4939, 7445, 6285, 12965, 11982, 1128,\n 6517, 14908, 11007, 16183, 12517, 5220, 13423, 928, 939,\n 12016, 8783, 1701, 17405, 10296, 6434, 13168, 14946, 764,\n 6245, 325, 4393, 17765, 7951, 1664, 6138, 16052, 4658,\n 1917, 13808, 8958, 12018, 7795, 3483, 5720, 16961, 4338,\n 12423, 3172, 462, 16789, 3112, 2183, 6375, 8295, 670,\n 2189, 1612, 10761, 12959, 11104, 9427, 607, 2658, 11014,\n 15862, 6735, 14479, 8209, 17230, 2816, 17417, 15899, 11987,\n 15399, 17269, 13386, 8905, 954, 10847, 13444, 10448, 11612,\n 3037, 15665, 4131, 13498, 2238, 8914, 17080, 7708, 16918,\n 8219, 6533, 998, 16550, 3939, 16742, 12843, 3689, 2447,\n 1057, 6954, 16353, 12368, 8277, 17039, 2392, 14402, 10239,\n 17113, 2111, 1708, 15343, 11282, 15984, 5592, 9385, 1080,\n 13756, 12225, 7388, 12406, 1524, 9150, 9147, 5004, 5296,\n 4426, 2598, 8614, 6589, 7475, 8133, 14146, 11694, 17345,\n 9298, 10321, 13572, 12101, 14235, 5290, 5302, 14945, 11819,\n 3828, 6908, 4140, 11863, 16561, 3304, 17821, 9435, 893,\n 17104, 13293, 6187, 11192, 1440, 6348, 5117, 420, 5973,\n 2030, 17783, 3031, 2643, 12028, 16856, 7589, 10537, 8550,\n 4645, 3738, 11036, 15539, 11068, 979, 2241, 3714, 17575,\n 16461, 16256, 4283, 6094, 3781, 14470, 16936, 5918, 9442,\n 16150, 5872, 8046, 2057, 7670, 9569, 17650, 12304, 14320,\n 12340, 11431, 4877, 12019, 17109, 5225, 4291, 12447, 7054,\n 12272, 7359, 9465, 17473, 5696, 8520, 115, 11118, 17758,\n 12906, 11974, 5064, 1091, 16657, 16041, 5597, 3732, 9247,\n 13507, 11112, 15261, 4565, 801, 7800, 9192, 7252, 16772,\n 7896, 16132, 8286, 4494, 13111, 5370, 140, 2168, 6186,\n 13379, 953, 5649, 4022, 1391, 9544, 6732, 11465, 9834,\n 3361, 3239, 15725, 8604, 10541, 6212, 809, 8669, 9567,\n 10649, 9156, 12751, 15853, 2757, 13537, 491, 222, 17301,\n 13047, 3727, 4671, 12621, 17266, 12550, 10323, 3553, 7281,\n 6814, 6250, 2167, 16539, 9894, 3586, 8225, 7620, 14889,\n 16421, 10493, 10681, 17068, 10890, 17732, 16591, 653, 2789,\n 3668, 8876, 4508, 15592, 7529, 10222, 776, 9692, 3618,\n 884, 12630, 8387, 3374, 15431, 10831, 10602, 3809, 2300,\n 5382, 11697, 1106, 1635, 4081, 10610, 17702, 17648, 14083,\n 537, 14824, 4231, 6659, 146, 8961, 14924, 15217, 12698,\n 5349, 15310, 9137, 16063, 7300, 11343, 3876, 13610, 5012,\n 17347, 4715, 13868, 13551, 16894, 6493, 17533, 8850, 13290,\n 920, 7983, 278, 13978, 9636, 10663, 520, 3613, 12439,\n 12802, 17067, 5373, 12809, 10328, 17718, 11635, 8258, 1172,\n 3745, 1032, 8438, 4152, 7622, 6380, 6411, 5711, 1363,\n 17199, 14741, 5587, 3729, 4379, 13122, 2181, 16559, 15389,\n 15175, 2397, 3481, 14167, 13121, 83, 2959, 13627, 3309,\n 10946, 16058, 4227, 9630, 13734, 2479, 4932, 3398, 16812,\n 2735, 13613, 10186, 7974, 17512, 16643, 2916, 16492, 2608,\n 12455, 8703, 17539, 3153, 10089, 475, 13182, 4474, 2194,\n 2395, 11190, 11109, 10376, 16761, 4271, 2979, 3084, 10322,\n 11237, 9508, 9446, 12092, 9895, 6522, 3378, 8478, 12618,\n 6125, 8617, 5874, 6714, 14825, 8362, 12647, 17773, 8245,\n 5817, 13395, 15847, 11555, 13796, 16172, 
9798, 17796, 16248,\n 17611, 9474, 6157, 16934, 1876, 1404, 11394, 7874, 1449,\n 16845, 1945, 16571, 3696, 12416, 455, 17354, 15111, 2165,\n 15472, 1163, 4966, 15409, 13072, 16638, 3861, 5125, 7577,\n 10809, 9897, 3684, 14305, 9450, 9506, 14884, 11875, 12233,\n 4233, 4269, 17604, 15814, 2924, 13484, 16527, 6231, 11092,\n 873, 7367, 14076, 9739, 2075, 723, 1274, 7265, 1954,\n 6217, 13990, 17510, 821, 14515, 1462, 14015, 7679, 14232,\n 7161, 6377, 8495, 14226, 4535, 2268, 9493, 4564, 4895,\n 10932, 13093, 8044, 1103, 7893, 3089, 6454, 10162, 1549,\n 8542, 1189, 9697, 4169, 17553, 2944, 13554, 14459, 14186,\n 17812, 6514, 6354, 8394, 10941, 12386, 8677, 9062, 6341,\n 2132, 4921, 14834, 14120, 13233, 5179, 11795, 1629, 2976,\n 6512, 16519, 17287, 2142, 14274, 17820, 11443, 14691, 11778,\n 1584, 9067, 4171, 9952, 7923, 14309, 9672, 16108, 2852,\n 15949, 9724, 2204, 14240, 10330, 6190, 5396, 13895, 13629,\n 16304, 1282, 17374, 9317, 17271, 3764, 12314, 11262, 2801,\n 310, 5240, 16640, 1529, 450, 7422, 507, 4153, 8459,\n 11429, 10158, 13648, 3297, 264, 14858, 12894, 5966, 9937,\n 9402, 1333, 6674, 15800, 11606, 3418, 3982, 3859, 6613,\n 15117, 10006, 403, 2362, 4670, 9675, 5057, 5581, 2027,\n 9650, 5493, 923, 9614, 17577, 17192, 17046, 17647, 17566,\n 8797, 17802, 10736, 4478, 1236, 15387, 13179, 14820, 4903,\n 17679, 3015, 14482, 11703, 15735, 7494, 5096, 15134, 14335,\n 12889, 14902, 3831, 3160, 1328, 9109, 16670, 13886, 5689,\n 7632, 13347, 2909, 3115, 12648, 2673, 17079, 11701, 2460,\n 4114, 12721, 1784, 12024, 14496, 12541, 15442, 10232, 12974,\n 15930, 7480, 9187, 4303, 5149, 5882, 8470, 563, 9345,\n 7149, 3242, 15338, 7077, 1623, 1571, 11204, 7591, 17394,\n 2234, 3373, 6810, 17234, 485, 11461, 11440, 12450, 12132,\n 1497, 8414, 13237, 4376, 3841, 4884, 612, 12444, 5313,\n 7319, 2283, 17247, 10275, 16572, 10147, 17442, 15825, 17035,\n 14165, 8315, 14706, 10646, 13260, 13439, 12266, 55, 13158,\n 12925, 15386, 2722, 2072, 15086, 16986, 3305, 13829, 11116,\n 542, 17709, 12506, 7257, 7722, 13332, 1298, 10183, 9677,\n 15711, 11533, 9846, 2588, 15606, 6681, 16524, 6791, 637,\n 1980, 11172, 6961, 3320, 294, 4386, 14830, 10607, 17507,\n 7026, 10622, 11613, 8561, 6117, 8639, 3439, 8861, 6806,\n 14448, 6508, 13751, 1891, 87, 13134, 12738, 7188, 707,\n 11159, 8694, 1369, 1210, 9794, 10230, 5802, 5536, 710,\n 15140, 15128, 11360, 3880, 11898, 8711, 10539, 16954, 9024,\n 1178, 6159, 11603, 6507, 9343, 11817, 1166, 17834, 9356,\n 16294, 1096, 3716, 17535, 11893, 1469, 8425, 11328, 15912,\n 2270, 13542, 5572, 7396, 4383, 5784, 15533, 3491, 3601,\n 3258, 12345, 4589, 7021, 13512, 13188, 10977, 12501, 7628,\n 1655, 11152, 14000, 9948, 3025, 8993, 15435, 5148, 993,\n 11293, 17250, 8019, 2726, 12982, 17818, 3650, 3384, 12333,\n 8500, 12758, 17322, 7787, 5940, 7243, 848, 1364, 9725,\n 5716, 12564, 16973, 5314, 14689, 467, 10371, 254, 1831,\n 3802, 3911, 12373, 7408, 5210, 4161, 7427, 17051, 7932,\n 10658, 4804, 9604, 15246, 17825, 3596, 3589, 16223, 15023,\n 15342, 2086, 7101, 10749, 9098, 10096, 10392, 903, 14777,\n 13897, 15733, 14460, 15001, 7950, 16363, 14594, 11910, 15467,\n 9423, 142, 5876, 17571, 17384, 3059, 14137, 5647, 9167,\n 13981, 14426, 17309, 1523, 3198, 14374, 5934, 4603, 14395,\n 3319, 8788, 14258, 5155, 15664, 14571, 3594, 12082, 15517,\n 13992, 12938, 13160, 1319, 6987, 10123, 16298, 4368, 13815,\n 11460, 15580, 1220, 2539, 1878, 15174, 8786, 14415, 3221,\n 12689, 17597, 4010, 8712, 1932, 15948, 16507, 15190, 14086,\n 7204, 7702, 10373, 6855, 17619, 14406, 12294, 12032, 16285,\n 17622, 
13707, 5251, 11865, 13862, 11067, 1544, 14110, 3100,\n 8002, 16631, 17459, 16601, 13214, 3686, 5951, 16983, 6797,\n 16138, 419, 655, 8059, 856, 15168, 17602, 14412, 15006,\n 8667, 11189, 3472, 6860, 14907, 12796, 8642, 1673, 16358,\n 5183, 8207, 17854, 10895, 4402, 742, 13460, 13505, 16734,\n 15432, 8564, 12694, 11528, 12562, 15935, 17461, 2043, 14875,\n 14040, 4991, 16204, 13074, 15048, 2859, 13262, 10272, 11215,\n 17122, 7051, 17254, 17207, 16523, 6247, 11004, 14153, 16483,\n 8708, 8572, 9889, 1670, 7940, 10366, 12411, 13995, 7534,\n 171, 2672, 1793, 9525, 4241, 13342, 2174, 12260, 3101,\n 17491, 11144, 3412, 17090, 6575, 196, 3272, 8350, 4051,\n 17694, 15306, 2037, 10269, 4136, 13860, 4287, 7625, 6328,\n 7283, 9503, 4390, 8132, 2787, 6423, 16399, 7731, 7532,\n 11591, 13431, 15989, 17450, 3822, 2990, 974, 10806, 1728,\n 15850, 675, 4509, 2898, 9492, 11120, 5137, 331, 14425,\n 1094, 7449, 11507, 398, 15424, 5958, 929, 12522, 4217,\n 9523, 499, 931, 4334, 13156, 9329, 2076, 3568, 13663,\n 15672, 14295, 14304, 13625, 10844, 4956, 2243, 15807, 1984,\n 3697, 4423, 6979, 3906, 8408, 5777, 8266, 9990, 8867,\n 15278, 15607, 7298, 9275, 6825, 3144, 16366, 2718, 4366,\n 10422, 11998, 4504, 3891, 5250, 15915, 13963, 14221, 12815,\n 9453, 15561, 10382, 15509, 7692, 15554, 6821, 430, 8043,\n 13597, 5203, 9710, 16147, 2394, 1579, 12616, 8434, 3791,\n 285, 4294, 8606, 12042, 1986, 8889, 7082, 5925, 13667,\n 16865, 9590, 1617, 7191, 3914, 17363, 9338, 8653, 11298,\n 12761, 4636, 11716, 878, 10811, 2648, 12458, 2364, 10638,\n 1501, 11150, 17556, 16679, 961, 4309, 3051, 10133, 10309,\n 6155, 14084, 3284, 3470, 13567, 841, 6147, 11242, 14136,\n 10485, 17681, 9617, 13191, 14580, 5752, 4838, 5128, 15057,\n 10165, 17474, 5574, 15313, 14414, 4990, 3359, 14203, 8540,\n 14569, 10884, 8212, 1574, 6032, 2350, 4616, 5687, 13686,\n 7492, 2120, 8565, 2291, 9007, 8116, 3066, 4585, 5218,\n 13421, 5267, 10545, 9091, 12646, 8236, 3644, 15499, 11918,\n 1534, 10653, 4067, 11344, 13657, 14823, 8326, 3165, 9958,\n 15593, 5718, 410, 13114, 15349, 12512, 3572, 4328, 2952,\n 9300, 3705, 7707, 16329, 2163, 583, 16692, 14423, 16580,\n 7540, 5534, 16654, 6553, 5188, 10354, 10641, 4017, 9384,\n 9606, 12963, 4084, 10754, 8655, 9271, 3431, 4943, 2034,\n 6891, 7997, 15995, 3475, 16110, 9907, 10520, 7583, 9148,\n 17218, 17692, 16069, 16615, 13975, 12675, 12465, 9608, 940,\n 1179, 5971, 15739, 13499, 12482, 5351, 13493, 1983, 5808,\n 8007, 15827, 9153, 3185, 9777, 1858, 8948, 12282, 8413,\n 11195, 14026, 3717, 9908, 15314, 3548, 1581, 1838, 7470,\n 12394, 9980, 2310, 10398, 16245, 5505, 3224, 12823, 7,\n 15880, 14373, 1120, 11787, 11901, 14581, 4806, 10630, 5151,\n 8034, 7314, 3097, 12216, 6142, 1894, 12274, 12658, 16407,\n 15176, 7031, 1572, 11217, 8024, 8645, 3637, 8307, 11539,\n 12325, 5326, 14537, 16563, 14511, 4693, 15056, 13356, 7768,\n 988, 10600, 12991, 9481, 12594, 10072, 5084, 3498, 7009,\n 1033, 5180, 1707, 1231, 7821, 7296, 6389, 16767, 11025,\n 6612, 1227, 1302, 7438, 588, 16162, 6920, 17749, 17835,\n 14348, 9047, 7900, 11740, 2412, 13446, 12265, 3405, 13002,\n 1969, 5455, 149, 14901, 14912, 11923, 10027, 15823, 16598,\n 8076, 9561, 12316, 1766, 7938, 1171, 5114, 8163, 1451,\n 13556, 11530, 12002, 7641, 9135, 7297, 10281, 11252, 2760,\n 2081, 15170, 7147, 9439, 6556, 6161, 5595, 1963, 9718,\n 7674, 6422, 14085, 17400, 3965, 11324, 8864, 1964, 14731,\n 1133, 17228, 1245, 5449, 14223, 15135, 14342, 9035, 6665,\n 11565, 16876, 4252, 10106, 12048, 2220, 14292, 12787, 15239,\n 13782, 13878, 3243, 5736, 17425, 136, 17317, 13019, 
20212, 22099, 44399, 52225,\n 12616, 50827, 5255, 34136, 29675, 26225, 43590, 8605, 52231,\n 23733, 385, 40256, 23623, 41403, 14324, 15755, 45145, 19187,\n 32744, 45977, 42239, 19701, 36628, 37970, 26077, 9646, 19694,\n 12469, 37899, 31882, 23619, 11755, 28933, 3384, 6578, 27356,\n 19783, 26890, 49538, 16744, 9486, 31233, 29020, 50157, 22204,\n 4477, 29465, 21098, 25556, 55200, 12666, 51697, 32283, 22943,\n 35322, 27723, 51143, 29343, 36845, 5571, 41425, 37323, 37208,\n 34328, 34560, 50756, 55092, 37040, 28671, 26714, 49163, 22595,\n 47120, 46110, 9008, 46023, 26209, 35096, 2353, 26362, 54760,\n 23416, 45067, 32322, 46252, 10308, 6392, 45415, 27886, 42963,\n 899, 34710, 50841, 34339, 7207]), 'F': array([ 1665, 3282, 40670, 19117, 23851, 32970, 10536, 6018, 29445,\n 5882, 36014, 14667, 25330, 9999, 23082, 9425, 27877, 33446,\n 18986, 39546, 32757, 5689, 15568, 39457, 5559, 30196, 10683,\n 36440, 1544, 9303, 15944, 15577, 16324, 5609, 13179, 79,\n 35223, 34372, 16541, 35420, 2783, 14127, 24763, 27792, 2174,\n 7721, 8946, 20590, 32553, 7908, 13857, 31252, 7625, 2317,\n 13154, 4724, 31809, 26634, 7414, 32541, 9955, 13508, 12802,\n 34446, 19192, 30743, 37686, 1860, 32913, 31509, 38539, 21405,\n 6438, 3675, 22381, 39267, 25067, 20328, 33621, 32302, 23659,\n 14485, 15799, 23862, 220, 4077, 16332, 22849, 38940, 13700,\n 7158, 9365, 23857, 36615, 32073, 18877, 22966, 40713, 38889,\n 7771, 18595, 30313, 30323, 23555, 17294, 1920, 28447, 40296,\n 26032, 30836, 1612, 33504, 31039, 5278, 14625, 19042, 22131,\n 3261, 23930, 28588, 14937, 33726, 36309, 7231, 28721, 67,\n 39606, 33402, 29050, 39397, 26544, 17891, 14569, 25759, 23991,\n 18384, 17513, 31586, 20472, 15703, 2593, 16594, 25032, 38268,\n 7870, 7591, 7161, 33751, 25336, 8493, 21970, 19002, 18641,\n 13249, 1789, 8409, 36038, 32044, 36182, 39451, 31452, 24406,\n 11223, 5982, 4597, 28871, 23328, 19650, 40416, 3212, 33822,\n 26159, 27576, 6241, 9095, 39767, 4162, 24745, 36263, 22615,\n 27473, 22715, 26644, 31518, 27957, 21404, 40476, 19563, 21919,\n 5615, 39601, 28899, 21361, 32100, 37448, 12739, 1131, 31967,\n 28982, 40038, 13419, 24111, 29179, 34807, 21726, 2820, 33953,\n 29768, 24079, 13015, 16290, 15287, 19137, 39423, 39640, 32662,\n 38858, 36319, 2821, 13125, 26574, 9461, 32900, 29493, 40569,\n 39700, 31172, 33037, 16753, 23107, 17540, 28666, 16082, 13928,\n 38290, 39409, 4956, 2695, 2548, 4157, 33695, 31717, 40258,\n 9965, 3489, 24638, 17029, 26056, 40515, 37577, 7941, 3370,\n 12933, 21616, 34213, 15247, 29777, 4368, 6661, 12214, 13197,\n 35094, 35841, 29525, 27938, 1310, 23602, 26972, 22389, 33947,\n 32011, 5726, 975, 16428, 3462, 30407, 20329, 13861, 35347,\n 18563, 5445, 28352, 13801, 37553, 2792, 37537, 16887, 25222,\n 11053, 15743, 5097, 16876, 358, 31632, 11801, 21672, 16669,\n 15650, 1266, 6020, 14737, 9072, 12825, 1596, 7514, 18658,\n 36090, 13250, 35084, 5439, 2054, 38214, 17742, 5105, 8107,\n 5374, 1354, 11403, 16247, 21877, 41031, 27910, 13219, 26020,\n 30269, 17088, 21100, 11401, 5095, 2068, 32815, 6545, 30256,\n 26935, 35324, 37837, 11478, 10020, 24798, 25679, 40799, 36595,\n 37025, 29639, 31806, 20795, 19010, 12242, 10818, 25611, 32391,\n 9163, 31815, 27855, 24043, 8955, 32614, 26054, 11824, 13989,\n 37656, 2801, 38401, 31678, 28753, 40270, 10680, 34419, 27703,\n 14274, 5239, 26588, 23045, 36797, 10521, 6353, 22750, 26378,\n 25218, 3296, 21746, 11603, 34614, 36353, 40234, 25304, 35108,\n 32550, 29909, 30422, 22203, 21179, 5542, 30538, 16853, 27662,\n 25728, 14477, 20686, 3115, 28236, 40465, 4155, 29785, 30828,\n 20695, 38549, 
30472, 3500, 2522, 40798, 32647, 12856, 6150,\n 2514, 17502, 2193, 25496, 26696, 29657, 6923, 1728, 30777,\n 35817, 3140, 29877, 6721, 12257, 37360, 12546, 29352, 27030,\n 18266, 38188, 37810, 33983, 14658, 14988, 40185, 16819, 32978,\n 8521, 3481, 38917, 29976, 399, 14783, 16603, 4431, 39972,\n 7303, 14111, 19681, 22959, 13451, 41104, 13367, 26176, 17947,\n 28226, 29027, 4050, 3278, 12293, 36361, 13294, 40573, 29754,\n 28970, 23933, 541, 11396, 16364, 20533, 3752, 15956, 25350,\n 40992, 20847, 6534, 977, 5686, 36586, 22239, 11608, 23965,\n 36444, 13746, 25138, 33797, 28445, 29783, 13588, 35392, 14036,\n 27054, 5438, 6440, 30553, 35030, 2445, 5580, 30179, 30058,\n 33976, 12033, 29897, 13446, 405, 1025, 32319, 5386, 2588,\n 18630, 40812, 40429, 8267, 40423, 36940, 784, 16175, 16676,\n 40370, 23993, 16629, 16292, 9992, 5175, 16227, 31431, 14216,\n 20337, 24423, 9477, 25452, 14844, 19822, 40267, 12783, 22546,\n 18439, 39818, 15931, 2688, 5854, 7552, 27039, 28759, 25533,\n 27364, 27157, 22837, 992, 27931, 11290, 34200, 2804, 28427,\n 12246, 23238, 32094, 7353, 38004, 22108, 40585, 8375, 13783,\n 37516, 36891, 13818, 23349, 28844, 20332, 7809, 37640, 16035,\n 39173, 34747, 24488, 23949, 26076, 26687, 25342, 21789, 11837,\n 25662, 34802, 27141, 32588, 27776, 39331, 28597, 37067, 15334,\n 37312, 36766, 34264, 27407, 31074, 23446, 2523, 18330, 11219,\n 1654, 26133, 29451, 19806, 32195, 38848, 14601, 15671, 27061,\n 16357, 22934, 38042, 34168, 27843, 1969, 1082, 18656, 36063,\n 22569, 30071, 4868, 27031, 36794, 11060, 3515, 34906, 22626,\n 21492, 15909, 33643, 39251, 30473, 24801, 3688, 10324, 27358,\n 9437, 21848, 36416, 39250, 25215, 10230, 10920, 40147, 12159,\n 9867, 31383, 9368, 9, 13876, 13146, 7252, 19479, 9114,\n 1215, 3885, 36533, 36297, 6590, 24158, 36623, 2177, 26393,\n 5738, 2070, 21095, 38861, 35214, 4195, 19302, 21759, 21410,\n 641, 32527, 18807, 13091, 31261, 27213, 28205, 208, 7253,\n 41173, 9499, 37761, 3641, 22679, 18680, 18142, 19532, 493,\n 28212, 22931, 22234, 3883, 41200, 10292, 17965, 20098, 4358,\n 16449, 17171, 8939, 12668, 13924, 8085, 3154, 35972, 36420,\n 32866, 35534, 18144, 38304, 40981, 30211, 8393, 25127, 41277,\n 12330, 5194, 9241, 28106, 33573, 37548, 20850, 27683, 17334,\n 34944, 2403, 9354, 7047, 36841, 38300, 34857, 12044, 2168,\n 22887, 20529, 6057, 26398, 10972, 24716, 9911, 491, 5881,\n 13859, 27600, 2503, 22793, 25643, 34463, 16742, 29098, 25980,\n 1706, 22181, 17491, 33816, 27861, 3739, 19380, 6968, 30568,\n 37673, 24543, 37561, 14362, 27769, 29154, 4354, 30345, 20918,\n 972, 15477, 1788, 30501, 20269, 1096, 37363, 32733, 15431,\n 21783, 3503, 19719, 7622, 39000, 30208, 33274, 683, 2627,\n 29733, 19772, 26016, 3535, 29231, 37663, 26170, 36567, 11283,\n 25154, 2163, 10784, 25134, 35987, 22636, 25349, 8537, 27084,\n 5736, 37128, 36218, 22028, 6128, 28980, 25479, 15315, 11742,\n 6643, 21071, 38011, 15878, 31752, 4087, 23504, 34832, 10738,\n 4655, 39328, 4205, 35433, 7333, 25074, 18635, 2010, 11036,\n 38664, 34994, 3337, 29292, 26682, 34603, 508, 37254, 8403,\n 7160, 22413, 4397, 30932, 190, 33047, 35731, 20954, 23251,\n 36462, 26213, 33136, 11896, 30699, 34331, 20393, 39124, 12016,\n 18345, 18818, 36082, 524, 14547, 15359, 9964, 5119, 15539,\n 22753, 22432, 35615, 26449, 31994, 31866, 19551, 26161, 11543,\n 29103, 6859, 23922, 35071, 8617, 15949, 28846, 8631, 11957,\n 19627, 25176, 27447, 9172, 3345, 12137, 18834, 34043, 22096,\n 6742, 8395, 215, 5088, 5387, 25282, 22278, 13573, 25840,\n 35742, 8697, 9004, 28065, 5961, 12375, 646, 32780, 23604,\n 33507, 37330, 
3016, 36622, 40948, 9136, 33686, 30697, 38132,\n 9322, 30149, 19531, 11778, 41012, 2909, 20315, 37788, 3302,\n 24124, 11617, 20148, 5193, 39345, 12196, 17307, 5621, 30669,\n 22667, 29561, 7593, 7972, 1556, 23485, 26215, 36774, 19777,\n 10816, 4687, 11281, 32759, 29698, 36155, 13772, 11094, 40355,\n 17824, 38586, 35574, 14874, 11363, 3429, 37445, 32031, 11713,\n 23050, 37142, 14470, 2429, 29868, 32473, 14505, 15233, 25989,\n 29587, 36979, 38665, 23780, 12655, 8800, 17956, 10653, 27679,\n 961, 31258, 29742, 9457, 24280, 38071, 31213, 16295, 40916,\n 13687, 24084, 30815, 28201, 40671, 7791, 16810, 1590, 15800,\n 38553, 4991, 22448, 2364, 19180, 7862, 25277, 7572, 37131,\n 21153, 4238, 28036, 9464, 20770, 21480, 30941, 39380, 37414,\n 37905, 8629, 13721, 9701, 25547, 2979, 918, 39570, 1794,\n 28365, 13983, 8979, 10705, 170, 34632, 9556, 14504, 19817,\n 28350, 14605, 37823, 26200, 8266, 5944, 4315, 22859, 10483,\n 35287, 25459, 29402, 26899, 20829, 28955, 24558, 15054, 27764,\n 36923, 15911, 27905, 37967, 14215, 17708, 41196, 21173, 17415,\n 10330, 4180, 37578, 21637, 30798, 9174, 29341, 4900, 39253,\n 2466, 507, 3857, 16404, 3625, 26318, 33290, 33452, 17002,\n 31905, 23214, 21081, 38213, 7454, 775, 7425, 12457, 26276,\n 31956, 5948, 38434, 30050, 23623, 26511, 5009, 21186, 11431,\n 21747, 18864, 38957, 3807, 87, 453, 312, 27806, 7726,\n 16790, 31768, 29485, 32645, 26471, 30135, 32321, 39029, 11793,\n 3478, 20893, 34417, 1295, 3397, 34227, 36071, 33157, 21570,\n 14094, 24229, 4094, 24055, 40057, 26214, 4694, 13494, 890,\n 8655, 365, 15603, 9502, 8864, 37008, 21459, 20489, 1018,\n 11690, 5038, 238, 39148, 25065, 18031, 11936, 4130, 5861,\n 20480, 828, 32626, 5277, 37611, 27113, 19448, 28328, 38783,\n 6782, 1733, 35351, 16258, 4709, 17099, 15078, 8980, 23639,\n 1234, 15096, 38501, 9075, 21657, 11085, 37395, 8921, 40990,\n 36009, 8316, 6920, 39079, 32135, 16197, 19350, 11366, 28489,\n 38977, 18992, 34306, 17576, 34981, 18975, 8294, 4980, 14891,\n 25680, 18914, 32525, 39316, 37796, 3031, 9651, 6012, 25106,\n 28734, 33887, 18011, 18122, 28019, 41230, 38335, 1538, 10896,\n 14840, 15959, 38062, 35484, 2139, 2576, 24511, 40610, 25893,\n 686, 16222, 18283, 38043, 27688, 33645, 19163, 10283, 20370,\n 10362, 31689, 30558, 29, 17943, 9024, 27079, 25644, 18469,\n 3992, 37960, 18733, 14910, 20929, 24443, 6352, 2604, 21962,\n 27999, 16863, 13526, 19836, 11587, 36880, 14415, 1425, 16233,\n 32637, 4212, 1073, 36035, 34875, 10233, 12975, 5074, 39504,\n 24892, 13013, 35953, 7086, 15275, 22178, 30515, 34044, 13959,\n 5568, 40765, 19676, 32330, 5124, 12873, 19555, 25829, 7653,\n 29189, 1125, 5030, 37689, 5985, 20224, 41269, 1854, 37004,\n 27715, 5969, 1879, 12270, 873, 6226, 21790, 41241, 21380,\n 40059, 9195, 39579, 19177, 14732, 35814, 16090, 31345, 2043,\n 21565, 6672, 16943, 26760, 2614, 31765, 26337, 28177, 28336,\n 32196, 34669, 23503, 9627, 9540, 11135, 2881, 9753, 33108,\n 12379, 1511, 5966, 2856, 9419, 12383, 38378, 28703, 36805,\n 17485, 13534, 6137, 9453, 26210, 24469, 31819, 896, 19182,\n 18270, 31163, 11418, 6868, 19830, 18540, 37361, 36518, 5376,\n 19826, 2719, 1785, 28700, 24678, 29463, 4632, 29195, 4665,\n 29523, 23680, 1403, 29168, 25877, 37912, 28377, 11817, 13553,\n 41117, 13041, 8590, 38287, 20862, 19202, 15457, 19291, 15828,\n 28440, 3824, 18341, 13005, 11646, 4497, 23632, 32306, 211,\n 1065, 33027, 40845, 36322, 20768, 12473, 29438, 34146, 3697,\n 31749, 8651, 33052, 19065, 6118, 17573, 20576, 11980, 3836,\n 40747, 16557, 22870, 36304, 10161, 3027, 10981, 32239, 17182,\n 29531, 8036, 10296, 
1877, 34960, 22374, 26964, 9813, 322,\n 17729, 11895, 7183, 38530, 383, 22182, 9348, 30037, 22110,\n 22572, 25978, 26998, 38159, 19524, 7437, 38190, 37623, 18672,\n 21518, 11037, 14609, 13968, 4363, 15030, 39140, 22632, 16134,\n 27556, 30758, 12078, 23453, 23695, 32213, 7575, 27416, 19762,\n 39542, 20723, 4146, 37242, 6813, 7022, 30834, 28939, 27968,\n 40498, 30290, 15124, 22930, 21796, 14045, 16301, 25953, 40909,\n 27782, 7262, 21387, 18281, 27438, 17008, 38084, 21968, 35204,\n 9636, 37941, 15760, 16499, 39191, 3795, 25271, 17845, 7034,\n 34920, 19307, 40428, 29366, 38772, 14297, 36577, 13332, 21142,\n 29824, 10735, 11982, 29287, 26518, 37952, 23562, 38651, 4712,\n 34761, 25286, 11607, 21691, 33302, 3735, 35973, 27907, 21609,\n 10815, 9201, 22484, 40461, 40031, 28678, 28979, 52, 38522,\n 10286, 33300, 3037, 36697, 928, 8876, 20454, 19728, 29819,\n 39671, 30030, 13193, 10191, 36479, 22222, 33304, 35971, 5762,\n 2659, 39505, 37543, 21807, 30333, 22884, 33578, 26051, 991,\n 24537, 37332, 21552, 20554, 39698, 7453, 39444, 6792, 11977,\n 26147, 31995, 35343, 18651, 7485, 33066, 28501, 14313, 337,\n 29558, 38774, 1358, 32322, 10045, 5225, 9855, 28459, 31344,\n 19317, 37005, 30735, 33439, 25259, 33270, 23908, 37767, 13773,\n 20200, 12496, 1952, 11495, 2076, 20219, 26630, 24857, 20115,\n 31390, 15097, 25976, 33251, 38725, 9452, 7320, 38683, 20248,\n 31487, 1565, 4947, 7403, 15534, 9880, 32830, 1250, 26824,\n 23607, 5664, 15503, 11249, 2992, 3415, 22032, 18440, 31038,\n 27998, 39902, 5626, 38923, 335, 32299, 27386, 23837, 34826,\n 11799, 7234, 25879, 17137, 32373, 19793, 15780, 19387, 9942,\n 13647, 12773, 24004, 29629, 33687, 7200, 4217, 6560, 2404,\n 10597, 3941, 40837, 10994, 24836, 20077, 17086, 8865, 6593,\n 22431, 27258, 7308, 30095, 41111, 24041, 38820, 8417, 3958,\n 37556, 34285, 14524, 31894, 21677, 2458, 19687, 19268, 38050,\n 37329, 35673, 26153, 33018, 27986, 13084, 4901, 6341, 6264,\n 12705, 16621, 634, 8729, 35369, 20928, 2326, 39959, 24916,\n 33340, 20122, 112, 13501, 13770, 8335, 34538, 14177, 9242,\n 324, 23367, 28275, 19843, 26722, 29280, 26402, 12305, 24796,\n 25828, 598, 24351, 13157, 857, 17612, 37139, 11112, 1068,\n 13069, 12367, 12512, 16384, 5713, 20844, 25054, 33553, 22664,\n 25578, 7740, 8906, 36216, 37945, 37793, 14078, 15570, 28857,\n 19189, 41293, 23938, 33682, 11194, 6507, 1045, 29870, 14258,\n 12887, 19754, 16423, 15620, 936, 24983, 23521, 11826, 20801,\n 20361, 18951, 21722, 36323, 30883, 37170, 15435, 14454, 38325,\n 15048, 17766, 2564, 9385, 591, 6713, 1075, 1780, 9410,\n 7506, 2841, 34329, 18693, 18254, 9039, 30817, 4618, 36343,\n 15386, 14731, 40524, 26623, 16151, 21280, 38148, 31144, 34157,\n 16147, 20087, 20620, 13506, 6113, 379, 33990, 2575, 22216,\n 30937, 22021, 40174, 39612, 3059, 39567, 10137, 31460, 7537,\n 7394, 34663, 16121, 17377, 22557, 22265, 1177, 22243, 32369,\n 27269, 2004, 27757, 40888, 13318, 2928, 4787, 39794, 11844,\n 3993, 1187, 23378, 28931, 2693, 22948, 30332, 15556, 18470,\n 32828, 4415, 11186, 25376, 9270, 22075, 1727, 26662, 2731,\n 40122, 22809, 20048, 11862, 34061, 15747, 8854, 20372, 34730,\n 33301, 10338, 17735, 17500, 9027, 32625, 17647, 24930, 6393,\n 29217, 34088, 8668, 15032, 3288, 23450, 16739, 1032, 12427,\n 7534, 15646, 28428, 22924, 7621, 11027, 18362, 14016, 28826,\n 7657, 36910, 23751, 34441, 39998, 36458, 21453, 26837, 17529,\n 26027, 15014, 42, 18227, 23863, 36500, 13305, 32977, 38970,\n 28462, 32796, 33499, 2124, 22885, 11605, 22359, 4396, 10708,\n 517, 27078, 40691, 28166, 30723, 10686, 14913, 23311, 27451,\n 16078, 
15772, 14796, 33282, 18437, 26739, 18874, 4170, 27518,\n 19121, 2691, 30124, 10667, 33024, 5140, 9200, 8330, 37550,\n 33051, 28015, 14493, 1371, 16137, 40842, 14508, 27134, 2442,\n 34149, 24549, 39236, 25089, 36557, 7742, 14747, 10122, 32288,\n 25818, 33781, 855, 7909, 26319, 20068, 593, 486, 23213,\n 22825, 38120, 32430, 21624, 37718, 26868, 16320, 36195, 22865,\n 32414, 4008, 26936, 38064, 31610, 9996, 31004, 24614, 37881,\n 30187, 23105, 35110, 22341, 37251, 2342, 22377, 19717, 28392,\n 22205, 40873, 37300, 31906, 38777, 39529, 4833, 2923, 39598,\n 36089, 40537, 40060, 13654, 35446, 6197, 24173, 6756, 13743,\n 3281, 5786, 18265, 17323, 1683, 31451, 4783, 12344, 17966,\n 31376, 2106, 20993, 21577, 6777, 32224, 34504, 10557, 475,\n 10841, 37356, 33293, 20879, 31385, 39339, 21595, 7432, 31447,\n 36452, 27393, 36820, 23022, 40166, 22461, 18197, 17482, 40285,\n 25351, 34557, 35460, 29942, 22010, 30731, 16475, 32483, 11039,\n 4447, 38909, 36772, 26325, 34345, 14500, 9653, 7419, 39479,\n 16986, 20854, 31445, 14825, 5128, 1498, 40864, 32750, 23498,\n 28832, 27046, 11856, 14225, 37506, 17570, 5252, 10203, 30703,\n 27208, 515, 94, 20631, 14053, 6096, 26538, 16043, 29618,\n 5755, 6542, 27820, 12056, 16008, 22261, 40293, 29413, 22915,\n 8917, 17705, 3682, 16012, 35073, 40162, 11148, 29922, 35850,\n 40926, 19605, 36359, 26113, 15387, 17475, 5089, 1667, 31306,\n 16817, 19847, 27454, 11843, 33719, 24038, 16443, 28796, 20739,\n 25589, 18050, 26515, 15702, 3121, 15860, 1970, 16251, 7904,\n 32114, 7702, 38365, 35915, 31837, 12791, 34273, 6324, 28179,\n 36418, 26069, 26364, 19211, 14536, 24134, 31190, 15453, 32752,\n 22014, 13811, 21456, 17228, 21716, 5505, 30874, 34143, 30482,\n 40194, 31595, 34699, 13094, 4925, 33917, 15052, 15914, 9054,\n 30949, 34989, 5236, 23394, 15940, 5490, 3043, 38593, 28740,\n 11510, 39427, 24690, 31183, 24566, 37349, 38414, 27279, 4598,\n 8003, 25574, 30689, 26297, 20054, 10589, 35474, 20888, 18051,\n 29064, 37179, 23833, 13862, 32516, 18198, 41036, 10146, 17031,\n 2641, 14789, 587, 8804, 17775, 32198, 28659, 3219, 9922,\n 5832, 17079, 30875, 5859, 9805, 27997, 6126, 35585, 7319,\n 28198, 20717, 3024, 14847, 26989, 33194, 9387, 37398, 9734,\n 7743, 41231, 11133, 27980, 33095, 31134, 40846, 10907, 36307,\n 35048, 23540, 23141, 33405, 15050, 27778, 14475, 35005, 23782,\n 39619, 16662, 18687, 29670, 9232, 8062, 22281, 31723, 32535,\n 4877, 17430, 5949, 16207, 5829, 33451, 31877, 32867, 699,\n 9979, 21827, 27885, 19381, 13955, 11835, 4253, 22543, 7203,\n 19096, 16996, 5392, 36215, 38913, 6657, 23585, 39046, 16757,\n 13074, 36242, 5212, 24907, 34816, 12145, 32791, 38226, 32455,\n 37777, 4673, 630, 22516, 4128, 8827, 762, 2596, 18856,\n 31614, 7570, 24962, 25648, 3689, 5168, 10798, 11385, 1867,\n 2260, 17877, 8026, 24185, 34232, 16201, 38328, 22701, 19495,\n 13453, 18623, 27489, 8346, 34290, 16862, 6803, 599, 24837,\n 11461, 17630, 33559, 445, 22194, 62, 35709, 36179, 35362,\n 18475, 30870, 25890, 21161, 4832, 32034, 12792, 2577, 3864,\n 12032, 25323, 40538, 39164, 9031, 15456, 5343, 39521, 28335,\n 18367, 29506, 5154, 30218, 31766, 3602, 28070, 14018, 18305,\n 2259, 21350, 14560, 26273, 10868, 31047, 8451, 1732, 3029,\n 27831, 35799, 27894, 14302, 39761, 1859, 39465, 39049, 40770,\n 21775, 1319, 1501, 1357, 37936, 7091, 27608, 7634, 19343,\n 17128, 29148, 27951, 29987, 14075, 14527, 23507, 8228, 7285,\n 19991, 24138, 9493, 4500, 37098, 30892, 30317, 12535, 21272,\n 38063, 25739, 3256, 23856, 19253, 35723, 6266, 13180, 20061,\n 19298, 8498, 27724, 10639, 5452, 32155, 36243, 879, 
247,\n 34764, 16476, 32919, 26013, 23878, 19904, 7762, 36036, 27525,\n 921, 38360, 12493, 15863, 19900, 13090, 1364, 33789, 9470,\n 30263, 1878, 29020, 2886, 18125, 6674, 24290, 21323, 10842,\n 12295, 18354, 34056, 19498, 18364, 35028, 30257, 22697, 35899,\n 40814, 37401, 8771, 33218, 2083, 21097, 20609, 17, 40244,\n 4695, 5229, 9878, 30506, 14341, 3032, 4203, 38442, 12764,\n 3279, 14585, 28996, 5687, 40472, 14076, 12382, 27202, 15648,\n 10836, 30142, 34924, 28516, 18221, 1618, 19913, 23809, 39584,\n 4209, 20558, 34493, 40320, 27345, 36127, 16338, 39012, 2999,\n 37734, 5956, 17203, 30611, 41005, 5758, 33600, 31488, 38557,\n 34451, 32819, 4675, 8226, 40340, 8304, 27971, 12521, 41265,\n 7337, 11026, 22206, 35515, 7645, 3158, 33065, 8846, 16888,\n 33198, 14899, 8658, 30725, 3236, 11684, 33076, 39122, 13527,\n 21127, 7313, 39724, 6336, 24881, 7185, 13443, 17819, 6078,\n 29845, 25825, 27232, 36813, 7869, 37885, 4284, 5737, 8236,\n 20, 6186, 16314, 6662, 1343, 28937, 23738, 3117, 40327,\n 8987, 34744, 38440, 39312, 16457, 23433, 877, 6339, 34834,\n 8463, 32356, 5271, 13442, 29153, 38390, 34799, 36044, 22563,\n 12798, 4107, 28045, 40552, 9026, 20047, 26491, 35373, 21754,\n 30808, 37120, 10247, 39361, 3445, 35273, 37606, 4293, 27350,\n 2159, 23645, 3587, 39907, 34892, 37206, 20639, 4085, 9764,\n 925, 19796, 26915, 11692, 36124, 7431, 14795, 41110, 4125,\n 15957, 17606, 19756, 6652, 23310, 9976, 19786, 27035, 17672,\n 1483, 12506, 5536, 26799, 31284, 30964, 6433, 31686, 26602,\n 32014, 34214, 28295, 25864, 15377, 18681, 4423, 29962, 8256,\n 11665, 11930, 2412, 4432, 20272, 32607, 33996, 20788, 15973,\n 12722, 13912, 13205, 10479, 35361, 39072, 40016, 39147, 3988,\n 9597, 38383, 20352, 21133, 35633, 27292, 39063, 32658, 29883,\n 38887, 37544, 4153, 5960, 11398, 23325, 27069, 32117, 13867,\n 31393, 31512, 31227, 5653, 97, 34069, 6144, 24121, 3105,\n 38204, 16478, 39067, 23270, 32009, 22730, 33332, 15081, 18689,\n 40959, 22500, 15607, 28069, 24630, 7894, 37057, 19288, 33349,\n 27472, 30985, 24649, 24645, 17153, 8589, 10095, 15044, 37078,\n 35888, 15992, 37059, 17492, 30362, 15311, 30626, 16534, 16920,\n 19422, 7779, 25386, 37792, 12826, 16714, 24155, 3435, 15904,\n 5562, 8281, 35859, 37047, 19743, 29218, 38570, 7994, 36000,\n 1236, 6378, 16690, 7181, 26310, 27876, 17827, 36221, 16156,\n 26503, 7147, 3878, 6344, 5946, 17552, 25661, 7562, 29054,\n 13133, 3719, 15382, 6525, 12039, 39620, 30077, 33457, 2015,\n 2684, 1591, 3409, 7955, 14496, 25692, 38499, 1857, 26146,\n 15062, 11131, 17967, 34918, 19716, 2012, 22488, 4738, 12903,\n 35722, 3508, 9247, 2647, 7566, 38974, 38545, 28163, 17157,\n 5743, 31828, 2988, 15162, 33143, 24896, 32162, 6701, 4703,\n 10506, 32595, 33417, 12985, 6298, 26892, 27329, 37575, 5818,\n 40457, 29114, 36982, 4525, 27021, 1161, 4817, 27140, 34762,\n 16609, 10039, 19965, 39719, 13242, 27419, 35737, 16998, 33443,\n 32459, 1954, 734, 23849, 909, 13985, 33355, 11814, 6466,\n 31506, 20251, 40365, 8565, 10988, 25423, 1893, 38634, 36922,\n 3476, 17670, 38244, 41221, 28496, 15804, 8442, 38934, 14533,\n 13087, 11381, 2199, 16470, 184, 35172, 9844, 28074, 38863,\n 13348, 20193, 24675, 21148, 15338, 17161, 26844, 13877, 25445,\n 40073, 10355, 29130, 17022, 15302, 11277, 16866, 2359, 24289,\n 40813, 33614, 29693, 26963, 266, 9717, 28787, 31747, 36792,\n 35536, 24066, 11807, 22547, 19036, 40335, 4029, 17980, 6711,\n 10950, 14637, 25094, 23495, 38298, 38642, 6139, 25705, 14098,\n 35622, 40167, 24611, 14104, 25654, 14484, 6596, 33744, 20343,\n 30960, 27012, 26081, 1795, 23929, 26814, 17261, 4093, 
12481,\n 36899, 31825, 585, 38406, 29286, 18491, 25048, 25897, 7678,\n 9542, 1853, 3588, 16854, 11306, 5440, 9678, 2557, 19755,\n 20945, 20180, 18426, 17724, 25635, 5487, 24701, 38057, 3246,\n 8908, 10238, 8622, 39783, 34383, 33921, 18430, 2239, 31733,\n 19597, 10568, 14092, 4607, 2031, 20410, 9916, 19086, 16745,\n 26525, 15235, 18554, 35749, 36066, 5118, 14282, 24019, 22969,\n 37, 34845, 20007, 19157, 8624, 39876, 25764, 39070, 17629,\n 24820, 33679, 2551, 15711, 35554, 18694, 5803, 26193, 1024,\n 14814, 3093, 16084, 20947, 4527, 28733, 38692, 5894, 14103,\n 30849, 13230, 281, 24583, 9411, 15528, 29650, 18547, 25521,\n 32787, 40716, 7389, 24497, 1184, 10736, 29462, 11864, 33392,\n 15656, 11199, 35721, 6634, 35667, 573, 18912, 39676, 31502,\n 34629, 31514, 15894, 20740, 20284, 678, 31847, 32294, 3234,\n 29796, 38794, 25425, 14835, 22473, 37892, 4631, 36915, 32934,\n 16153, 18252, 38448, 8716, 737, 12784, 38098, 20916, 11214,\n 30081, 2085, 37195, 29443, 4485, 9947, 1217, 17761, 38249,\n 36275, 25433, 26851, 4798, 6147, 29321, 24990, 973, 41151,\n 40334, 4437, 36616, 34097, 20632, 33296, 3747, 1662, 28433,\n 12014, 21379, 37450, 32366, 12567, 25341, 32596, 29978, 5172,\n 27442, 6976, 28693, 6244, 4210, 16275, 3304, 32382, 20416,\n 34931, 26499, 36171, 36384, 40171, 14309, 37872, 17572, 36388,\n 27802, 38236, 15735, 29658, 36234, 29449, 16665, 40689, 37288,\n 14963, 11180, 16936, 1137, 40696, 17598, 20495, 1701, 20959,\n 10405, 7221, 12724, 3834, 26029, 8467, 6816, 39844, 35578,\n 25329, 4788, 20933, 26049, 12563, 2778, 1799, 32104, 8283,\n 28957, 39883, 7031, 36515, 39726, 5979, 6263, 31262, 38546,\n 30809, 20297, 18862, 8606, 15005, 7636, 10630, 31893, 33999,\n 29261, 25158, 13196, 1158, 10827, 16353, 36320, 12448, 24982,\n 28944, 24665, 9043, 17821, 27225, 35461, 8620, 32437, 27145,\n 6269, 14701, 3204, 30117, 7479, 37971, 2419, 17744, 30241,\n 18446, 33471, 32628, 28874, 22700, 35695, 5709, 12296, 34433,\n 4643, 17421, 1336, 21840, 19114, 40375, 24551, 9288, 8947,\n 868, 27103, 19337, 36633, 1205, 27697, 1059, 11221, 2211,\n 19234, 5237, 9532, 22760, 1292, 12832, 35599, 11007, 32908,\n 31173, 31726, 9761, 28195, 30801, 23513, 17220, 23043, 38645,\n 9910, 11610, 5028, 35949, 16160, 35966, 39986, 6116, 6883,\n 1414, 28756, 5149, 40735, 5251, 21822, 8508, 40898, 39341,\n 19357, 3869, 11355, 2823, 23786, 29023, 5577, 13045, 40039,\n 11602, 23124, 9409, 37466, 15072, 23061, 18648, 20483, 9698,\n 15144, 880, 31153, 10881, 9892, 24153, 25194, 17996, 6729,\n 23822, 2133, 10876, 16360, 15608, 7393, 34048, 22031, 5185,\n 36449, 25308, 8733, 22113, 6421, 22279, 31455, 32688, 34885,\n 8879, 2868, 39284, 17559, 11967, 28422, 32325, 7617, 15203,\n 5511, 16741, 21328, 9641, 26100, 12169, 26850, 20325, 32493,\n 25753, 23515, 19151, 1689, 8249, 36770, 27450, 1488, 19170,\n 5015, 5273, 18231, 38199, 24075, 10780, 242, 7133, 27893,\n 41037, 3459, 25806, 13308, 21464, 27177, 4167, 15463, 23186,\n 30913, 19912, 12577, 27374, 19459, 7245, 30749, 4065, 14905,\n 24421, 35038, 27787, 11670, 13135, 32599, 33387, 5827, 26881,\n 27304, 6946, 14102, 10428, 34621, 33731, 10242, 6251, 3122,\n 7230, 32244, 41042, 19473, 36527, 1099, 19076, 15246, 13240,\n 26243, 8195, 8096, 22607, 22229, 36651, 2496, 2302, 13784,\n 9498, 23351, 29254, 21994, 8873, 33435, 33233, 37222, 18425,\n 2718, 28596, 28936, 40452, 36041, 9145, 7, 13214, 16895,\n 32495, 1448, 19191, 11954, 2325, 15383, 9357, 25688, 11229,\n 39825, 21342, 10533, 4768, 30277, 25266, 4989, 37694, 23613,\n 29100, 17925, 11318, 8420, 30158, 9058, 20128, 30219, 30928,\n 
435, 25309, 14883, 7747, 39951, 11386, 36875, 12299, 38110,\n 9197, 38486, 5963, 25090, 17450, 36986, 3732, 26234, 14079,\n 23400, 40557, 29035, 38106, 29986, 40250, 3797, 31439, 21780,\n 38693, 27810, 19669, 4381, 21057, 14140, 12184, 2876, 38825,\n 27646, 3651, 14582, 29237, 31464, 31129, 1115, 13337, 20373,\n 19729, 11723, 9743, 31931, 29239, 16891, 16589, 17556, 9814,\n 38992, 35292, 20262, 21990, 12302, 9720, 2666, 29213, 715,\n 38681, 21820, 21792, 34741, 29291, 38145, 28379, 24426, 6310,\n 332, 10833, 7187, 9169, 19957, 14517, 33652, 11565, 40852,\n 18808, 3131, 1129, 189, 30059, 18799, 6970, 38608, 136,\n 27270, 39419, 23541, 25754, 39988, 20052, 8219, 10009, 9450,\n 16045, 14594, 27227, 15749, 12629, 20012, 26387, 39918, 40598,\n 39010, 2773, 20349, 10654, 2490, 41053, 29582, 34817, 22517,\n 33730, 27281, 35252, 32079, 21741, 28268, 36118, 26597, 21283,\n 6129, 7242, 4975, 35346, 25534, 39811, 5788, 21891, 18527,\n 17654, 12271, 36008, 9467, 38341, 30423, 24214, 19911, 32451,\n 37229, 15675, 41035, 4460, 21362, 8892, 8429, 4916, 39654,\n 33422, 9021, 20537, 27207, 24086, 35786, 30771, 27376, 27635,\n 35468, 3034, 10503, 35464, 3571, 2225, 21088, 18126, 12555,\n 2671, 13838, 388, 32099, 39513, 23970, 40904, 6536, 25748,\n 22545, 7155, 30118, 28354, 1660, 40543, 31963, 147, 18432,\n 16613, 37607, 38174, 13951, 10943, 2854, 40204, 17537, 12911,\n 7178, 1443, 27942, 31878, 11444, 15793, 28952, 14256, 33530,\n 19400, 6432, 33759, 4552, 25627, 5727, 39414, 28758, 36352,\n 22562, 28016, 1175, 18353, 23134, 37852, 21679, 24687, 35022,\n 10264, 13684, 25378, 13844, 19356, 1980, 29745, 29421, 41033,\n 9749, 28518, 13315, 20513, 25926, 21454, 30462, 33979, 26639,\n 3583, 36033, 26456, 27181, 16359, 12632, 18193, 16361, 650,\n 2727, 25571, 10435, 4481, 20308, 35149, 22542, 7157, 32042,\n 2046, 26304, 7580, 35241, 26940, 12507, 23777, 39197, 30447,\n 5630, 31156, 16053, 29101, 13707, 34196, 24321, 7768, 27978,\n 9807, 34827, 30334, 6691, 30086, 25982, 13848, 34902, 11811,\n 11523, 27268, 6177, 6645, 16365, 1989, 14917, 10986, 28355,\n 6931, 8561, 23932, 14266, 33743, 15043, 29588, 21051, 15643,\n 19569, 30890, 30449, 13445, 29689, 10465, 18086, 9496, 31649,\n 28077, 16326, 22554, 28978, 3589, 13413, 30424, 11469, 22769,\n 5050, 22170, 32464, 19286, 20681, 369, 37846, 30999, 547,\n 2623, 8932, 37109, 20713, 13965, 37599, 6975, 28985, 12772,\n 17711, 23853, 32865, 6799, 3811, 24653, 37907, 4594, 9702,\n 37590, 290, 13726, 14444, 4347, 37887, 40642, 19451, 30266,\n 36816, 25196, 22807, 24448, 3804, 21714, 966, 10229, 29569,\n 29395, 1324, 28552, 14134, 338, 40253, 31491, 16656, 21949,\n 1076, 19988, 24095, 39087, 4753, 24999, 28535, 14921, 12687,\n 14464, 4494, 33518, 3884, 2190, 18501, 33469, 5722, 40319,\n 24906, 34838, 28091, 28051, 10809, 26478, 9519, 9424, 21959,\n 7886, 25592, 37265, 3094, 32204, 27424, 14711, 15282, 30549,\n 26152, 40279, 31162, 2212, 5474, 4346, 7559, 10048, 3534,\n 34745, 25246, 15039, 29520, 33920, 41093, 16389, 4442, 8591,\n 29902, 6828, 30722, 33210, 22472, 6755, 39202, 33836, 29334,\n 32500, 16141, 2776, 24224, 10108, 8276, 15269, 13944, 23740,\n 14249, 33801, 35443, 38927, 27123, 4967, 17278, 37447, 11785,\n 3786, 17713, 15150, 35782, 32060, 37106, 3874, 13689, 15266,\n 18661, 13279, 28010, 13039, 35579, 15403, 28120, 12559, 34881,\n 24099, 12774, 6921, 24030, 35902, 31827, 5655, 14020, 21067,\n 36987, 6221, 19165, 36814, 1307, 13008, 34636, 1404, 35735,\n 37482, 14531, 27850, 37183, 21329, 23252, 6406, 193, 37199,\n 8539, 15276, 3845, 558, 2065, 24767, 24519, 
13615, 7225,\n 36466, 8553, 41209, 35724, 36912, 5444, 3418, 1408, 29173,\n 5092, 36308, 7146, 13055, 15042, 7499, 27100, 16225, 9546,\n 7323, 25345, 30782, 23233, 8200, 32107, 20571, 31323, 15248,\n 20040, 31079, 9319, 35279, 7924, 37956, 12225, 12100, 24634,\n 32416, 19134, 3838, 6407, 10245, 31881, 31100, 34081, 29472,\n 25021, 35124, 14544, 5965, 20949, 5863, 22762, 23789, 31569,\n 41251, 8901, 27702, 11533, 3077, 20774, 40245, 25112, 34719,\n 31607, 33285, 12070, 28313, 29371, 10473, 22292, 15818, 30223,\n 31950, 11969, 668, 12063, 8254, 18668, 29508, 222, 37821,\n 4394, 3721, 15966, 20955, 8033, 6938, 10566, 39292, 21561,\n 35886, 15414, 31913, 37129, 27700, 21867, 22411, 36655, 40149,\n 16930, 3104, 33786, 23480, 20715, 17519, 32639, 1752, 19005,\n 22035, 29316, 34873, 28727, 37233, 36099, 16462, 806, 17180,\n 36389, 37637, 17832, 6016, 37314, 40168, 9324, 29600, 24637,\n 17010, 7069, 10313, 40353, 18038, 13967, 9771, 3049, 37423,\n 13904, 22193, 9599, 23219, 39823, 27320, 20995, 18374, 19564,\n 32519, 9415, 3746, 19633, 13243, 30448, 39533, 40252, 21695,\n 11424, 33937, 37000, 28923, 27151, 39061, 9778, 15470, 16188,\n 412, 19014, 25869, 33132, 36442, 8642, 9050, 31761, 2129,\n 33099, 16050, 10070, 18750, 17055, 27053, 27184, 36311, 14235,\n 33619, 26369, 34513, 32582, 6498, 216, 33558, 34237, 32795,\n 19310, 15347, 39459, 33141, 40367, 14237, 32283, 7626, 16168,\n 30314, 542, 32936, 13802, 17300, 31522, 2170, 766, 29869,\n 31836, 32263, 10041, 39856, 23234, 24914, 34604, 40406, 22196,\n 17283, 29576, 38599, 26796, 10132, 20384, 9123, 6338, 4012,\n 15948, 32665, 580, 452, 9307, 31402, 23651, 6092, 858,\n 35835, 11047, 3946, 23205, 4852, 15888, 38302, 347, 18121,\n 14150, 26311, 4576, 15610, 31096, 50, 14764, 20607, 35875,\n 38854, 4521, 33184, 20549, 5793, 13208, 17243, 23750, 14543,\n 39575, 2538, 33972, 5442, 14207, 18490, 11014, 19637, 1098,\n 16560, 23767, 38443, 16394, 41167, 6537, 27923, 33228, 26937,\n 32329, 26365, 24791, 16705, 7911, 310, 11243, 7412, 36679,\n 31943, 13424, 20701, 19684, 17155, 22952, 34140, 22414, 21438,\n 32021, 34304, 24246, 26690, 22260, 26524, 14400, 5449, 17163,\n 16277, 6259, 26867, 14574, 38979, 33629, 29853, 14165, 23284,\n 9847, 26724, 13470, 293, 40767, 5167, 40091, 5409, 8543,\n 40247, 6439, 37904, 23505, 2238, 28062, 12400, 11836, 19471,\n 30082, 40007, 36550, 24535, 11136, 11228, 29435, 11332, 31474,\n 33706, 21896, 34282, 26641, 36522, 26655, 16616, 12983, 12922,\n 27355, 26432, 28604, 7716, 10098, 31018, 26136, 25494, 4537,\n 7355, 36432, 24487, 30444, 8233, 32546, 35199, 3099, 34558,\n 24516, 11891, 8023, 35184, 286, 29446, 6680, 26496, 29532,\n 12518, 1279, 29357, 13105, 30456, 24884, 31475, 29739, 35551,\n 4053, 7162, 40246, 20845, 2963, 9111, 18251, 6951, 28407,\n 19741, 33510, 35741, 15313, 5042, 38941, 40479, 41178, 7417,\n 13213, 29551, 8929, 2805, 15635, 8190, 19503, 30940, 31264,\n 37464, 23543, 4071, 34017, 21976, 34303, 38688, 37264, 16006,\n 31915, 30730, 7279, 36957, 26836, 40536, 7596, 30973, 9336,\n 31936, 17954, 13428, 17522, 10077, 30112, 273, 732, 20063,\n 25660, 14981, 37055, 18225, 21307, 6915, 10439, 16497, 35205,\n 13722, 14756, 21169, 33835, 6865, 16918, 9706, 17581, 41098,\n 3981, 30662, 22606, 15368, 6009, 40967, 31998, 31117, 2292,\n 29714, 32742, 19609, 30757, 19910, 18151, 23112, 19557, 20354,\n 39564, 16177, 31325, 10908, 21292, 21143, 14833, 19647, 17414,\n 39809, 31537, 8610, 19816, 29749, 11196, 37081, 25170, 23218,\n 2864, 16104, 25795, 30283, 30965, 33175, 37241, 6288, 16031,\n 17305, 4624, 1839, 27071, 
27221, 35559, 27902, 21157, 21718,\n 29861, 6359, 19333, 18768, 23329, 15564, 23870, 19305, 9483,\n 19963, 38283, 39603, 31577, 11920, 31788, 5379, 11943, 31513,\n 39909, 28410, 13096, 36214, 29806, 13805, 2415, 22048, 31685,\n 27929, 34925, 15329, 6535, 26247, 2511, 13622, 11917, 17591,\n 9535, 37225, 6227, 35382, 13398, 26649, 28726, 36529, 30383,\n 27533, 8377, 33230, 3581, 29205, 24921, 27000, 18728, 13845,\n 9852, 2408, 24694, 26531, 1297, 20653, 13927, 6752, 21740,\n 25658, 3511, 40965, 7614, 2003, 26322, 4379, 4705, 15262,\n 36514, 32211, 30884, 209, 23917, 34439, 21572, 39167, 9000,\n 16619, 37302, 36247, 17028, 5651, 3366, 29729, 33182, 11250,\n 27570, 32259, 12043, 14655, 36516, 21738, 29554, 27253, 15517,\n 12340, 2857, 29387, 9832, 8665, 39254, 15087, 38002, 36123,\n 25120, 36650, 23653, 21089, 17867, 29972, 18393, 31725, 14209,\n 34676, 1429, 21048, 12852, 20094, 10554, 4820, 6506, 22830,\n 37923, 25566, 5614, 27433, 15102, 32932, 27750, 20091, 39099,\n 11346, 15664, 18644, 13067, 12494, 32497, 19972, 25743, 33843,\n 9970, 35821, 3808, 14014, 18342, 15333, 8002, 8470, 22003,\n 32960, 10115, 31738, 5978, 11195, 25017, 9305, 28144, 3268,\n 19458, 41123, 13664, 36281, 4542, 8886, 12109, 41286, 15798,\n 8782, 33127, 17292, 20300, 28696, 32829, 26014, 25024, 23910,\n 36722, 11981, 1580, 11823, 32203, 37121, 22371, 34060, 38240,\n 17914, 8639, 33984, 11216, 24755, 36328, 33360, 21082, 12355,\n 10207, 24639, 30026, 16929, 24039, 22163, 29433, 19143, 16796,\n 15436, 23742, 9687, 386, 13021, 8, 34238, 9632, 8594,\n 31311, 26514, 21010, 13533, 38939, 32229, 12854, 28632, 5116,\n 4563, 11743, 40673, 7698, 18498, 39434, 10173, 29822, 32548,\n 9841, 7239, 6845, 8951, 4575, 30452, 31222, 14932, 21419,\n 11889, 30923, 11432, 22556, 13776, 5983, 8618, 23508, 34854,\n 29329, 28450, 28384, 9520, 3776, 34087, 2972, 18876, 9884,\n 39537, 2143, 4927, 10378, 22483, 29645, 23790, 33359, 14748,\n 37013, 30978, 6361, 10008, 40210, 40766, 5498, 20018, 9655,\n 7775, 33011, 4482, 20156, 38353, 30324, 12007, 18851, 25147,\n 19067, 25210, 7427, 18170, 27291, 32528, 37874, 35847, 34161,\n 37104, 8666, 32680, 21841, 13176, 40471, 31603, 31053, 17884,\n 37562, 12613, 1713, 39685, 3753, 24626, 40823, 36302, 3569,\n 26135, 14798, 30821, 38902, 18235, 2700, 5504, 35401, 8943,\n 20721, 15614, 11993, 7539, 10663, 13267, 36928, 3014, 17473,\n 20876, 6823, 9681, 13049, 13563, 25560, 28017, 33957, 16074,\n 20748, 37370, 13465, 17364, 4303, 5889, 38218, 31147, 26927,\n 40440, 249, 1813, 1446, 8086, 11272, 22917, 29118, 11949,\n 37780, 3284, 15280, 790, 22731, 15291, 36300, 3965, 37462,\n 21684, 25586, 27341, 2468, 39853, 21227, 4215, 40280, 25316,\n 10280, 40086, 5114, 30278, 35917, 29215, 12000, 22951, 38074,\n 13896, 12312, 30616, 33278, 11353, 33818, 33425, 14007, 21432,\n 30839, 38638, 19992, 13271, 26992, 12574, 27335, 37320, 18260,\n 6098, 2341, 34600, 3339, 41244, 32102, 14008, 16595, 34325,\n 17181, 20816, 4123, 34326, 10665, 20589, 17451, 15961, 1559,\n 14633, 32858, 36332, 16731, 11145, 26038, 29019, 23145, 20664,\n 10436, 2520, 23263, 25320, 38921, 1331, 10057, 40248, 21544,\n 39637, 19599, 33519, 3396, 34436, 32589, 37649, 30908, 34152,\n 817, 4722, 15491, 30764, 16847, 897, 6104, 5857, 37491,\n 1022, 10208, 4910, 38444, 35119, 39880, 38152, 8461, 7208,\n 38183, 38430, 34085, 17838, 40631, 6908, 1750, 19243, 537,\n 14390, 14173, 5826, 24404, 24417, 39387, 1185, 1486, 24128,\n 27904, 11898, 20963, 7129, 39854]), 'E': array([28474, 59194, 17257, 38727, 4464, 46314, 11049, 64748, 40777,\n 47724, 63877, 40725, 
39461, 4015, 25042, 16403, 25800, 13224,\n 60849, 43911, 32531, 38807, 28829, 17735, 23474, 2330, 1678,\n 10095, 10560, 10702, 53787, 62919, 4590, 63087, 828, 47459,\n 20610, 46415, 59908, 52150, 65327, 9954, 47865, 65100, 31898,\n 18131, 40085, 25157, 6012, 28952, 2952, 35459, 21249, 18944,\n 56491, 64517, 52069, 25505, 20777, 28304, 53296, 60938, 27798,\n 38881, 5232, 45079, 1137, 30749, 432, 16033, 5015, 49293,\n 66025, 19970, 53061, 55379, 38510, 36195, 35207, 20243, 60081,\n 43484, 37042, 12155, 9749, 15330, 49690, 6894, 22787, 37446,\n 50553, 64209, 43217, 40122, 21700, 47858, 6109, 48971, 53741,\n 65234, 37524, 3451, 38150, 61120, 15464, 35945, 63235, 3857,\n 2938, 15983, 48792, 39027, 55476, 31906, 40467, 39970, 52334,\n 58571, 56430, 11884, 15797, 398, 42831, 62970, 12498, 11246,\n 46306, 64480, 26493, 25507, 48162, 51789, 11293, 31136, 28923,\n 4704, 39794, 48720, 11571, 59263, 65073, 44365, 8875, 18330,\n 18915, 47881, 40852, 36169, 33477, 18722, 40111, 62526, 34909,\n 725, 33118, 22885, 24932, 51645, 63051, 25366, 36567, 672,\n 14645, 42752, 5308, 57833, 2259, 1542, 9862, 55961, 37259,\n 64251, 32272, 56244, 47794, 50130, 15804, 30198, 44161, 921,\n 37707, 7187, 58121, 45965, 58450, 66076, 6032, 50930, 56353,\n 26607, 24609, 36196, 452, 699, 40385, 46183, 5641, 30535,\n 66210, 32080, 19468, 47040, 5367, 21320, 8598, 6357, 48252,\n 9693, 48862, 36510, 57231, 42431, 34675, 23483, 2590, 30152,\n 51944, 169, 29598, 11050, 2496, 49409, 4033, 19950, 43907,\n 9112, 52824, 32460, 65277, 19272, 15666, 20744, 52114, 52293,\n 30933, 30403, 33124, 49227, 636, 2834, 41404, 1316, 7548,\n 36338, 35607, 55629, 42088, 50648, 17500, 49557, 56415, 58577,\n 48281, 58517, 44585, 52050, 13166, 37856, 32213, 48303, 47391,\n 42043, 51610, 2629, 57076, 56950, 17400, 13734, 44164, 11045,\n 62961, 7516, 35567, 34780, 60413, 22782, 18509, 59600, 37702,\n 40586, 666, 11167, 65727, 34155, 43177, 5561, 51450, 13987,\n 531, 39157, 57748, 39887, 8439, 45313, 49381, 60972, 44794,\n 16965, 26319, 5636, 13162, 42227, 16622, 16337, 38946, 26397,\n 12073, 37056, 26478, 4439, 59198, 23335, 56853, 25337, 12131,\n 62616, 40945, 4295, 6917, 22088, 30327, 63985, 20393, 2340,\n 33804, 37453, 18978, 5983, 53702, 42864, 63176, 13195, 34478,\n 62260, 7191, 14087, 11309, 8777, 8624, 19063, 33283, 9222,\n 43614, 58966, 6292, 57458, 10690, 40127, 9604, 52373, 25567,\n 2882, 38355, 62178, 59576, 21453, 21220, 27806, 2104, 22404,\n 36441, 63628, 27973, 26149, 8630, 38507, 42866, 37770, 16535,\n 23975, 32301, 64602, 15979, 38962, 8896, 53612, 63911, 47361,\n 47674, 21572, 13221, 12575, 24795, 17900, 58594, 15468, 54591,\n 42060, 970, 23701, 55096, 33692, 33551, 12408, 10475, 16913,\n 62714, 28199, 55099, 27877, 29155, 23334, 40734, 55588, 22959,\n 44009, 51935, 15989, 23399, 2745, 31480, 11444, 56886, 1492,\n 57794, 65959, 45882, 880, 20259, 56702, 18571, 49506, 17114,\n 59246, 63385, 24641, 50469, 24479, 22406, 26967, 58362, 48761,\n 45478, 23663, 64104, 5506, 44766, 17196, 58518, 70, 61382,\n 7209, 36847, 12996, 14703, 16740, 14351, 6015, 42199, 17498,\n 43977, 7661, 58744, 23555, 16112, 8039, 18696, 5754, 24034,\n 37413, 15009, 53802, 21587, 45152, 56136, 42888, 2468, 52258,\n 11092, 59091, 4037, 16349, 25908, 58471, 40334, 44657, 24058,\n 11296, 49489, 46480, 5600, 8749, 63467, 31293, 63234, 9765,\n 60864, 51575, 6874, 1937, 39929, 15268, 23414, 18475, 42941,\n 32530, 24344, 26425, 28061, 11600, 63006, 41572, 4821, 33770,\n 63111, 35546, 42205, 61607, 16892, 25542, 39830, 1242, 26457,\n 4971, 46659, 46785, 39817, 40906, 
65337, 20349, 336, 64126,\n 44043, 49548, 27449, 23177, 26420, 54168, 11071, 20075, 52929,\n 36411, 10533, 43266, 4170, 5171, 32270, 50895, 63728, 44851,\n 11780, 46433, 19143, 50353, 12935, 41678, 28512, 28289, 27453,\n 33952, 33028, 7994, 39992, 62196, 29932, 41987, 3706, 447,\n 20617, 18652, 7955, 44581, 38146, 40664, 42879, 8245, 43127,\n 9677, 62749, 32712, 10750, 53050, 62914, 12134, 42641, 23071,\n 38804, 17092, 44158, 34853, 20418, 56025, 63044, 50103, 31805,\n 18559, 56328, 37728, 49777, 48342, 13374, 38926, 38018, 65991,\n 38528, 21475, 61934, 65550, 55359, 45746, 14940, 26459, 15374,\n 14547, 13926, 14819, 3559, 65878, 33605, 45102, 8103, 48756,\n 12547, 35143, 65557, 61790, 56902, 29067, 25205, 35879, 14569,\n 15905, 27677, 36478, 50420, 25621, 43209, 17490, 15088, 9698,\n 11326, 51091, 48442, 34152, 5908, 39415, 52027, 25387, 18269,\n 486, 11724, 33424, 7667, 26943, 59519, 25526, 7405, 26716,\n 18248, 8848, 27062, 60119, 55748, 54017, 28778, 7340, 13091,\n 62374, 62444, 32045, 15463, 41804, 14971, 49912, 14455, 46509,\n 33098, 5797, 54077, 58873, 61665, 30799, 61802, 32806, 37457,\n 29187, 48292, 30887, 39108, 6568, 21293, 60138, 65090, 37734,\n 47511, 41161, 50203, 5223, 41906, 51137, 24742, 5798, 46982,\n 42682, 57723, 59163, 26108, 34141, 17276, 57837, 3726, 13930,\n 44723, 26878, 56247, 9610, 30711, 46989, 17993, 15319, 13887,\n 40756, 27239, 13151, 27592, 59138, 64640, 11539, 18891, 38375,\n 64694, 38353, 42279, 11455, 22789, 27965, 36583, 31887, 15318,\n 43592, 22280, 62539, 33663, 55685, 31216, 35429, 62331, 49479,\n 19319, 65542, 44811, 40419, 58690, 25272, 45548, 13839, 981,\n 28618, 48449, 64886, 3459, 59070, 52668, 55897, 28868, 53199,\n 10206, 29717, 13563, 17419, 21628, 43808, 55314, 58733, 7513,\n 63919, 57925, 12743, 12734, 56780, 63889, 53615, 59890, 39056,\n 50603, 11406, 32177, 37409, 66376, 30047, 32966, 25400, 14333,\n 52926, 46410, 24598, 50464, 317, 36783, 9209, 64212, 8041,\n 61533, 59025, 44653, 13814, 12490, 17839, 10070, 32278, 29271,\n 18664, 37526, 621, 58140, 27837, 11809, 29307, 38370, 56806,\n 23901, 40567, 37513, 5672, 59003, 55951, 16426, 3661, 26345,\n 56352, 48211, 12527, 30125, 55530, 25577, 47416, 35805, 42742,\n 20790, 27947, 9413, 27727, 65586, 5567, 61673, 62066, 65230,\n 5923, 57309, 40139, 52451, 48943, 40561, 9584, 30331, 38969,\n 47545, 12176, 1651, 27613, 7905, 20323, 58175, 63565, 27978,\n 7106, 35105, 4780, 6170, 34830, 65618, 36367, 23743, 43234,\n 60130, 54738, 37486, 13039, 40521, 4426, 62553, 64509, 30276,\n 58500, 44879, 39089, 7571, 20728, 54932, 53909, 8143, 32359,\n 25701, 44363, 39867, 8722, 129, 25436, 27452, 14699, 3214,\n 32306, 34290, 64182, 11973, 34613, 55116, 21481, 62208, 42843,\n 27294, 44085, 7177, 16082, 27156, 41092, 15413, 39117, 34145,\n 57311, 60430, 41937, 62770, 50999, 23206, 12338, 62147, 23998,\n 64700, 6861, 45984, 62960, 61304, 45453, 28146, 8870, 34311,\n 38498, 63716, 712, 15814, 1363, 57828, 59809, 53040, 22714,\n 27334, 21187, 34915, 30051, 55936, 34832, 56539, 57697, 41152,\n 66034, 26689, 59940, 64618, 34294, 55330, 5618, 14536, 37029,\n 9981, 10956, 21895, 21591, 45889, 25121, 2807, 41755, 4273,\n 8679, 8988, 46608, 38684, 1611, 32792, 50829, 39226, 4515,\n 54039, 6911, 60145, 41595, 10180, 48528, 57524, 21160, 10593,\n 4676, 28977, 13566, 65787, 2194, 52436, 45336, 53092, 21524,\n 8650, 42441, 8182, 53108, 28154, 13015, 66329, 57125, 64735,\n 40455, 8186, 50249, 25587, 49985, 26744, 6303, 24572, 11584,\n 695, 16614, 19605, 42293, 63863, 58358, 625, 51360, 24606,\n 57860, 11812, 11637, 9713, 
52535, 54406, 43818, 31670, 13802,\n 53516, 13502, 53520, 26067, 33706, 33445, 33177, 22135, 51266,\n 15849, 33955, 60887, 59588, 14069, 10213, 9642, 39463, 31507,\n 50162, 37194, 30639, 51955, 27247, 19037, 37799, 33643, 55810,\n 42782, 39125, 18477, 33044, 24116, 60860, 2857, 44642, 51910,\n 40982, 19310, 37508, 14397, 15634, 15053, 30840, 6133, 8443,\n 49536, 58765, 64594, 36519, 43671, 46483, 5291, 20850, 13509,\n 13645, 9722, 20252, 48182, 7356, 48779, 58835, 46285, 31098,\n 40155, 31096, 45459, 40215, 25646, 58302, 4333, 22551, 65144,\n 64512, 48427, 6182, 12144, 62516, 28736, 52128, 20759, 62968,\n 28519, 56346, 44673, 61232, 56759, 46551, 41639, 19261, 27142,\n 7990, 49037, 65372, 59748, 41163, 18738, 29687, 5701, 55505,\n 42661, 25022, 21532, 32283, 3044, 57102, 23148, 34291, 21074,\n 372, 6312, 30656, 36706, 2352, 43754, 17281, 40977, 3804,\n 43198, 20282, 8353, 7408, 15952, 40851, 62176, 60456, 7468,\n 32099, 12977, 59704, 4025, 54636, 58226, 36044, 25589, 46155,\n 60016, 29613, 6389, 2200, 39733, 24378, 62238, 19352, 19887,\n 30817, 6569, 48834, 47305, 5557, 10026, 44840, 1161, 56058,\n 39258, 42619, 11334, 39309, 8732, 27298, 56075, 57232, 55996,\n 11033, 29490, 10581, 42552, 54993, 19081, 34041, 54509, 40263,\n 1393, 63888, 15503, 51607, 65825, 39120, 17013, 23537, 19670,\n 18699, 12766, 7602, 63545, 17684, 34360, 33278, 54116, 2270,\n 2348, 47157, 24852, 16908, 44594, 23289, 32817, 16148, 56524,\n 46994, 7466, 27201, 33457, 24164, 43494, 6779, 23105, 53693,\n 7814, 15864, 26693, 14260, 17433, 8452, 7139, 47227, 6067,\n 52446, 9273, 32456, 59568, 34992, 35876, 10408, 23591, 47117,\n 38690, 16688, 14766, 53859, 6926, 51169, 50694, 48787, 4989,\n 35983, 23873, 39372, 57387, 32039, 1650, 32945, 29885, 20762,\n 30951, 40107, 12353, 41074, 65225, 48716, 25536, 47512, 33953,\n 4620, 42938, 62534, 2762, 61392, 28927, 42425, 31622, 60203,\n 2397, 13437, 38188, 12644, 18837, 61245, 47720, 12605, 52400,\n 60612, 29628, 454, 59039, 4218, 27603, 23098, 5309, 49057,\n 25011, 36934, 56144, 42936, 14167, 61636, 3760, 26130, 25334,\n 40770, 48379, 24910, 30967, 63518, 30751, 39223, 64290, 56794,\n 58529, 38849, 60921, 52889, 2204, 64711, 54840, 23890, 31695,\n 17879, 25539, 56380, 51110, 16813, 12893, 48917, 26695, 28621,\n 8352, 35936, 49135, 4489, 4591, 50198, 57910, 62823, 18022,\n 61477, 14978, 19244, 52688, 41170, 52479, 53752, 41040, 23287,\n 51328, 23344, 9466, 26963, 15786, 48321, 16398, 52291, 6193,\n 28139, 16591, 30299, 64606, 48860, 45454, 33515, 25257, 31715,\n 45875, 12430, 24218, 44369, 10338, 23318, 58599, 45116, 30413,\n 44274, 44504, 65382, 46911, 46413, 7230, 35233, 62409, 10678,\n 50035, 21643, 2341, 5803, 36459, 9404, 40313, 61860, 13616,\n 19309, 39710, 31131, 24537, 53903, 34756, 47067, 28852, 29108,\n 35877, 29808, 18808, 29261, 14897, 20297, 362, 49960, 5868,\n 12972, 10079, 40925, 31989, 45858, 18932, 28036, 25064, 56871,\n 18183, 39419, 44299, 14107, 44267, 19923, 41645, 24259, 34962,\n 7181, 16613, 3456, 45962, 33802, 9067, 5885, 63727, 37270,\n 45634, 41370, 20360, 6787, 31997, 64692, 18780, 34052, 6749,\n 44265, 53536, 11155, 59973, 13867, 11517, 16109, 60750, 56519,\n 7608, 56740, 34280, 22998, 23759, 59109, 61933, 65300, 18931,\n 54980, 35693, 19961, 63363, 30937, 4637, 41223, 26750, 11141,\n 41863, 4565, 51690, 20080, 27229, 51439, 39928, 42059, 17191,\n 38884, 124, 15289, 2280, 16566, 54539, 24444, 37451, 38160,\n 13123, 63958, 50979, 58040, 65408, 7720, 58320, 48896, 47523,\n 38785, 29480, 6942, 30529, 21300, 42730, 9791, 5738, 34880,\n 5284, 24367, 21593, 
16267, 3943, 9094, 15319, 30809, 14752, 32830, 30661, 34779,\n 5334, 22778, 8142, 4521, 11644, 25660, 46699, 35832, 42621,\n 39954, 10912, 22803, 31840, 52102, 10958, 27743, 36029, 31608,\n 13542, 14735, 51498, 22195, 20187, 22591, 20645, 48878, 48707,\n 35093, 11922, 2406, 34804, 33537, 12068, 41324, 25650, 19248,\n 15473, 6428, 35123, 48459, 13363, 23984, 32024, 1715, 48431,\n 17557, 25400, 35385, 48255, 6471, 32762, 15306, 7407, 21401,\n 26880, 13713, 10592, 27794, 33774, 44784, 19461, 23210, 39219,\n 4990, 40727, 37771, 41385, 24832, 43916, 20684, 2612, 24306,\n 10684, 49951, 23847, 32760, 27752, 50982, 10077, 14366, 9700,\n 12884, 9769, 40411, 1149, 3404, 27810, 2340, 23654, 16003,\n 7451, 1123, 27755, 3003, 77, 34119, 4833, 30897, 23260,\n 18091, 1792, 30395, 5023, 48143, 41164, 10298, 19552, 19177,\n 30427, 21436, 2021, 36794, 21277, 29615, 18960, 40572, 40369,\n 34967, 33375, 21163, 13062, 38328, 50252, 49430, 46862, 6153,\n 45078, 35092, 43350, 41049, 17588, 37372, 25054, 34122, 23949,\n 23156, 25892, 13608, 7327, 23244, 26392, 41596, 20271, 46130,\n 21056, 28412, 12875, 51266, 10021, 51738, 48578, 19366, 51458,\n 39387, 28877, 43964, 40279, 44320, 39929, 16204, 17602, 198,\n 44349, 36681, 1144, 12541, 45091, 6872, 20991, 17896, 23886,\n 14587, 42482, 14955, 47962, 18065, 28472, 34778, 30849, 39693,\n 20878, 13533, 23021, 11039, 44307, 2155, 27196, 11700, 3724,\n 36112, 3425, 582, 45470, 17918, 14717, 25235, 15427, 40855,\n 44653, 10776, 31512, 16406, 24460, 42532, 38079, 35117, 13445,\n 1373, 23799, 40419, 6784, 46203, 13219, 25889, 28060, 37734,\n 12091, 20493, 21001, 17889, 20848, 29128, 32773, 28025, 32076,\n 6911, 8379, 10380, 28210, 37930, 49780, 41603, 13403, 48139,\n 38420, 9271, 11233, 45469, 45526, 15750, 6282, 25755, 11493,\n 3801, 46540, 43269, 12410, 29351, 13181, 37464, 31878, 27489,\n 24784, 33578, 11994, 49996, 35028, 38455, 5345, 18541, 20817,\n 66, 37322, 16293, 14483, 38031, 19290, 17293, 22941, 10658,\n 45558, 38680, 40627, 38717, 47387, 15295, 22336, 26072, 10473,\n 37917, 36077, 50660, 27999, 11165, 36123, 24672, 24932, 50078,\n 38818, 38479, 31613, 9401, 17842, 30437, 28893, 26911, 45054,\n 33329, 44567, 13747, 38082, 3907, 19118, 49517, 21547, 39679,\n 8587, 19778, 47058, 2786, 21645, 8919, 41358, 4788, 47514,\n 38359, 19727, 30310, 29367, 52095, 35013, 29207, 32939, 44805,\n 10524, 12867, 40435, 51301, 42901, 5912, 10402, 21663, 29001,\n 52242, 9149, 23751, 12296, 27728, 33098, 33723, 37215, 28196,\n 37191, 9530, 16409, 19644, 3030, 12460, 33417, 2496, 26985,\n 29474, 37495, 23609, 11634, 35570, 49598, 31702, 15726, 46437,\n 50399, 26901, 23408, 10119, 27896, 48606, 17621, 10195, 4625,\n 7751, 41926, 38141, 44321, 46273, 20080, 7914, 33611, 23804,\n 28396, 52290, 6614, 26525, 4172, 19758, 45631, 26645, 18067,\n 25346, 48332, 52164, 50818, 32947, 13675, 35043, 13878, 915,\n 38807, 19816, 29169, 359, 37389, 43676, 8578, 13480, 19124,\n 32922, 47253, 6550, 7632, 10782, 23071, 35648, 13311, 47089,\n 4694, 28638, 36367, 44479, 50233, 11972, 13469, 41125, 2259,\n 26696, 38020, 48772, 30369, 13857, 28097, 34730, 40122, 37668,\n 13496, 9311, 38929, 29811, 15909, 4088, 47801, 23180, 28771,\n 22781, 4499, 28107, 47719, 1718, 5465, 17854, 19554, 38493,\n 32457, 29730, 20827, 46214, 18441, 39711, 46196, 16334, 8617,\n 27757, 1248, 14453, 36583, 33340, 23441, 5593, 24613, 13765,\n 2379, 21116, 30423, 51017, 6972, 42034, 18602, 51765, 32441,\n 50564, 34599, 21848, 13504, 43252, 36426, 7437, 14533, 18870,\n 51152, 46274, 20454, 51420, 43045, 42759, 43910, 28536, 13338,\n 
15920, 5672, 24637, 6830, 2014, 40018, 17968, 11024, 32072,\n 17157, 23065, 742, 27506, 4900, 39207, 7827, 35525, 12143,\n 35362, 43359, 38572, 11798, 22639, 49044, 52091, 35124, 8938,\n 10880, 8421, 20658, 28860, 18573, 8258, 29639, 16198, 26296,\n 38203, 24766, 22289, 8382, 22304, 11384, 27965, 16952, 19193,\n 37206, 34110, 8067, 5211, 11264, 48045, 8990, 40787, 26416,\n 43796, 49941, 30265, 40404, 17116, 32335, 45761, 51559, 33986,\n 119, 2694, 26965, 50915, 7894, 7176, 39937, 1545, 41869,\n 25589, 33804, 51884, 10778, 38402, 37714, 33141, 32628, 12320,\n 25597, 29163, 48149, 23277, 18901, 46841, 16071, 13565, 5494,\n 5555, 27309, 37879, 38449, 45044, 42926, 18259, 39429, 38022,\n 18685, 23820, 24875, 543, 46299, 38564, 8116, 8286, 36843,\n 6794, 38987, 47160, 11000, 29576, 2012, 31378, 29264, 16323,\n 34515, 14632, 22705, 35193, 35461, 27973, 43271, 40101, 47498,\n 10269, 52126, 29904, 50902, 18894, 47205, 37853, 45178, 30075,\n 8032, 33016, 17861, 31775, 50460, 41742, 26077, 20192, 36971,\n 44504, 52146, 29915, 32459, 24243, 6889, 21358, 33057, 46823,\n 5181, 2463, 21330, 10753, 16729, 19002, 31239, 30610, 3253,\n 28324, 48169, 39748, 32266, 39062, 44348, 18703, 36930, 18451,\n 31768, 35174, 30536, 42609, 16615, 17708, 8315, 40878, 9675,\n 46106, 7692, 45825, 13801, 143, 43918, 41295, 13940, 28366,\n 37646, 30038, 44729, 1490, 42064, 39660, 18502, 24864, 28014,\n 43497, 29426, 18857, 40491, 52255, 8004, 318, 37489, 20921,\n 30943, 6974, 30749, 10964, 25431, 27960, 35858, 30833, 22013,\n 44924, 9317, 43132, 31605, 32126, 22795, 21599, 15707, 9938,\n 36240, 44413, 37772, 2086, 46969, 40591, 36074, 26622, 6203,\n 34403, 7102, 43190, 3832, 50147, 32684, 24985, 28873, 9348,\n 41062, 24918, 45545, 22530, 50676, 8143, 25878, 10074, 12965,\n 18517, 44398, 47884, 29902, 26688, 10108, 42422, 9599, 18525,\n 43153, 40518, 29637, 21961, 3406, 6838, 36371, 35721, 27078,\n 2836, 28587, 3388, 46727, 13578, 19604, 15493, 21009, 48009,\n 51724, 2232, 3063, 1031, 49428, 7860, 27783, 28136, 4013,\n 1924, 5659, 49620, 39927, 8679, 46350, 51324, 45651, 9103,\n 1496, 19502, 9636, 44576, 39264, 41089, 8667, 18571, 30819,\n 5282, 35387, 49206, 19319, 45494, 37611, 30040, 34351, 46814,\n 42394, 34918, 44719, 32616, 27414, 45543, 46275, 28021, 42398,\n 15733, 40734, 2024, 16758, 15216, 10411, 49233, 15412, 8994,\n 18077, 3098, 52351, 25871, 35779, 16628, 51470, 4067, 36552,\n 3280, 4789, 4568, 16243, 27549, 52355, 20703, 24912, 46685,\n 10243, 8246, 3351, 41515, 21948, 8541, 29427, 36835, 49130,\n 33423, 19148, 38367, 51136, 49948, 44084, 50546, 12921, 8803,\n 40871, 1514, 31814, 24142, 21158, 8073, 51035, 27439, 22479,\n 13492, 51888, 22007, 8339, 24913, 20296, 7394, 49567, 23646,\n 1851, 9748, 24107, 20927, 45314, 38676, 161, 2600, 6126,\n 32083, 41003, 27812, 26388, 37298, 41300, 18959, 42986, 25331,\n 33095, 50591, 12625, 44423, 13839, 13274, 51352, 970, 17463,\n 48246, 13952, 40221, 10358, 35687, 3221, 38754, 14760, 39761,\n 18265, 17138, 16500, 24584, 18001, 3056, 8183, 1917, 33361,\n 33347, 40456, 13079, 44060, 15065, 38072, 49757, 23196, 31933,\n 47021, 5001, 31035, 23015, 8150, 19123, 8547, 30162, 33003,\n 29240, 19798, 389, 20457, 51630, 39759, 22655, 25093, 47268,\n 38091, 39312, 27664, 24765, 3661, 32382, 28522, 13800, 24585,\n 15879, 32744, 32060, 15301, 31387, 47303, 1944, 37530, 2341,\n 42610, 29048, 40204, 18886, 42094, 11973, 46890, 29588, 50368,\n 49861, 44960, 29403, 10126, 21648, 12777, 35777, 38905, 8492,\n 45085, 43333, 45102, 44107, 30539, 27954, 29613, 15776, 15582,\n 8513, 33972, 27637, 
30377, 40283, 31895, 1125, 15767, 34694,\n 31550, 25489, 41325, 28365, 49883, 45276, 12090, 13709, 25774,\n 27413, 10709, 3359, 12078, 17965, 40349, 15856, 23657, 45111,\n 25852, 47259, 25022, 4882, 22211, 33031, 298, 17784, 28484,\n 16604, 28785, 20971, 30757, 2757, 4141, 29256, 22748, 26228,\n 37826, 51536, 24490, 15992, 23637, 41918, 46612, 19871, 31592,\n 16375, 46712, 27923, 30023, 15035, 11977, 37434, 31970, 9782,\n 27589, 47095, 33732, 69, 46525, 35762, 48574, 25839, 45084,\n 27983, 7826, 22762, 48343, 10098, 39386, 6460, 11727, 3054,\n 12742, 18157, 2, 20717, 36122, 25275, 31111, 14628, 15187,\n 51796, 10733, 26352, 27292, 48454, 36815, 26139, 43806, 17474,\n 3460, 20908, 52053, 44574, 5115, 40771, 13348, 20822, 4169,\n 45812, 38397, 30770, 24937, 12059, 2151, 31786, 34649, 17189,\n 26651, 812, 29821, 25066, 40290, 28706, 46633, 37335, 52072,\n 29373, 24887, 41866, 5187, 19664, 40428, 12881, 8330, 47803,\n 34636, 23623, 29571, 28561, 31946, 17094, 31980, 22929, 37371,\n 29336, 4023, 16795, 50003, 18714, 37950, 8463, 6700, 48483,\n 49890, 6552, 47856, 24872, 47164, 38984, 11327, 44012, 14784,\n 28000, 11950, 11820, 36792, 19126, 36124, 40476, 46797, 19034,\n 35151, 6448, 27248, 32287, 42725, 31636, 8789, 49526, 21676,\n 21388, 29057, 27695, 18116, 26476, 12939, 25488, 17561, 49866,\n 32816, 4992, 11398, 29865, 39832, 51232, 8897, 44429, 23502,\n 16785, 19736, 3806, 41735, 5997, 17039, 41819, 6702, 24746,\n 50595, 50317, 17678, 16157, 633, 52361, 49495, 21668, 35885,\n 33155, 48451, 52307, 19271, 34472, 37245, 12295, 29089, 4550,\n 50424, 52194, 4974, 22588, 10323, 569, 40830, 6116, 25950,\n 33398, 7539, 14452, 1264, 29329, 19585, 4354, 10254, 4576,\n 35159, 22954, 37482, 11990, 28837, 7018, 32185, 20811, 14893,\n 30406, 11012, 29688, 8308, 11277, 39253, 28539, 49199, 26302,\n 46320, 46666, 1177, 8384, 17161, 48854, 2616, 41898, 8197,\n 26366, 16321, 23402, 14778, 25872, 11897, 48866, 19064, 2646,\n 36117, 18246, 24251, 20674, 24227, 13200, 30342, 47521, 47169,\n 29795, 1133, 7816, 10849, 295, 13627, 19940, 7617, 41968,\n 46958, 5201, 25985, 43258, 19093, 37594, 14143, 22015, 27185,\n 16207, 47769, 32586, 6555, 5504, 22323, 17691, 6817, 42155,\n 15256, 45230, 33044, 5722, 10886, 9439, 51593, 47871, 35481,\n 9432, 33168, 7295, 27435, 50577, 27641, 29809, 1258, 25425,\n 6640, 28606, 40026, 13707, 5301, 973, 23064, 510, 47497,\n 26223, 563, 31898, 48657, 14759, 20403, 2148, 20640, 16008,\n 34385, 8741, 17729, 5235, 11280, 26588, 11200, 47782, 34832,\n 40795, 43939, 48116, 42693, 40445, 49585, 27536, 6954, 20994,\n 25469, 43543, 30308, 17256, 23235, 18556, 37451, 38896, 15571,\n 23060, 37955, 36363, 22276, 20007, 13370, 45587, 6188, 24190,\n 22031, 1419, 5239, 44819, 34883, 49179, 31331, 1139, 27116,\n 46622, 9717, 44293, 11687, 29667, 18876, 1833, 40934, 18306,\n 51769, 19449, 13471, 38357, 28427, 8290, 3269, 32074, 18210,\n 40033, 13593, 30121, 49262, 35284, 16612, 40102, 23070, 23074,\n 41772, 30031, 3484, 29533, 34416, 46498, 9913, 43524, 21496,\n 33125, 41123, 23452, 39564, 10558, 1336, 35772, 49783, 30871,\n 1707, 9869, 12700, 5121, 25580, 46190, 5295, 24614, 3458,\n 5508, 32292, 223, 22722, 25835, 48532, 15866, 6988, 3073,\n 11706, 27636, 23607, 27266, 595, 47813, 43868, 46838, 15181,\n 11664, 3097, 22245, 43721, 10249, 24532, 45132, 18545, 19135,\n 40078, 30050, 38342, 48726, 15826, 37590, 23094, 13978, 38592,\n 20245, 35427, 20934, 14661, 34905, 38064, 16381, 33857, 48797,\n 45809, 15372, 39985, 50568, 5696, 29279, 12355, 9793, 34357,\n 43089, 3118, 37062, 50602, 34041, 6129, 
31034, 48482, 6190,\n 17840, 24083, 21503, 6547, 11226, 51932, 32420, 51245, 38400,\n 51327, 2173, 8792, 9445, 13584, 11309, 28459, 5348, 6312,\n 9340, 34614, 38465, 49207, 26509, 43078, 16, 40756, 29218,\n 50060, 36871, 45465, 51950, 17915, 26034, 14718, 41501, 20227,\n 16836, 12753, 20748, 10202, 16774, 34174, 51280, 9491, 13725,\n 20, 43240, 13289, 23110, 11080, 11912, 31286, 16873, 37963,\n 27768, 47982, 17361, 4523, 50068, 30044, 4766, 7296, 6570,\n 30965, 7373, 3467, 6795, 17467, 12621, 25480, 31862, 22618,\n 1184, 9055, 26602, 10511, 19362, 44363, 11495, 46378, 43903,\n 47820, 17673, 7078, 22973, 1157, 42485, 30802, 24286, 11691,\n 11655, 40442, 5304, 51433, 20290, 27387, 3844, 15302, 43556,\n 12416, 49605, 28404, 47083, 51386, 24125, 5459, 33479, 25042,\n 44184, 25904, 22492, 8905, 9100, 15142, 27676, 27397, 45689,\n 32847, 47341, 48106, 48963, 36938, 38358, 15674, 8641, 39713,\n 49234, 7889, 17159, 38815, 25492, 29042, 16339, 13485, 4873,\n 16365, 7952, 42305, 39545, 35830, 49957, 12056, 6913, 2939,\n 23084, 2752, 30674, 4804, 8110, 19238, 50085, 23748, 13841,\n 17060, 19096, 37339, 17947, 30202, 26382, 4108, 52132, 37868,\n 48093, 49658, 44109, 3294, 3690, 50822, 41144, 44833, 15153,\n 42828, 340, 10995, 43772, 43324, 8769, 45690, 47692, 21475,\n 21683, 37984, 31063, 39887, 16766, 48938, 29938, 1481, 38104,\n 35162, 15083, 40616, 9288, 18653, 33608, 17105, 27341, 21016,\n 9481, 38529, 18133, 44708, 51440, 17272, 42859, 5484, 22436,\n 46691, 4730, 36757, 922, 48136, 43124, 11787, 25907, 12800,\n 36593, 24325, 50175, 46902, 30305, 44773, 13932, 26316, 31833,\n 2386, 24845, 2314, 44892, 5276, 32253, 11150, 27491, 28260,\n 32205, 46329, 22699, 14700, 23930, 19399, 31483, 32181, 25009,\n 20221, 25612, 8527, 9390, 37630, 51144, 14454, 6603, 43468,\n 19441, 15138, 41683, 7337, 52210, 24255, 42010, 28506, 24081,\n 13859, 32480, 30296, 13897, 16611, 35290, 37980, 3385, 22900,\n 3390, 36677, 46750, 39225, 8986, 26151, 12342, 50181, 44206,\n 8981, 28388, 5933, 36692, 14118, 14999, 29912, 23875, 43472,\n 2986, 17205, 15415, 9511, 34266, 18711, 8477, 49300, 18694,\n 19594, 52273, 41405, 40340, 48813, 25440, 38836, 36858, 2378,\n 19944, 6522, 30198, 48151, 50045, 2127, 8202, 4879, 50250,\n 29538, 43312, 47905, 1185, 585, 15096, 12986, 49553, 3762,\n 23309, 759, 37357, 34373, 46031, 38071, 30022, 45491, 43147,\n 41232, 26780, 35283, 18692, 18864, 16662, 7480, 26429, 17366,\n 9642, 33483, 18521, 42880, 50511, 50355, 30652, 18227, 17334,\n 24420, 25059, 12719, 38976, 37080, 39252, 35992, 45632, 46215,\n 44467, 17329, 25231, 7335, 5906, 42334, 36767, 9665, 9329,\n 29672, 13033, 11, 3304, 37173, 29647, 44408, 32342, 2169,\n 32329, 8225, 3952, 38391, 37654, 49134, 47683, 49313, 39849,\n 3036, 3930, 2423, 27682, 48826, 30739, 39159, 46715, 35476,\n 26762, 10208, 39868, 47130, 9510, 29406, 3722, 16156, 4303,\n 30196, 12932, 49209, 47509, 37146, 36800, 42325, 37066, 33032,\n 49804, 45128, 37257, 44322, 23734, 39514, 47262, 38094, 36277,\n 25452, 23584, 11788, 35407, 7209, 7408, 12069, 30441, 336,\n 7694, 17671, 10398, 16420, 34600, 29675, 48857, 42454, 19215,\n 19060, 41163, 16802, 27712, 42066, 4130, 50586, 1253, 15149,\n 16893, 45713, 45512, 15346, 23999, 44890, 44692, 36496, 49898,\n 25579, 28508, 23345, 17693, 9823, 18762, 6727, 43544, 42988,\n 44550, 35651, 15615, 42812, 347, 46222, 5306, 40876, 51444,\n 29228, 41188, 19402, 36786, 50500, 42488, 6684, 34939, 6576,\n 23944, 8079, 49022, 1227, 28382, 27478, 23699, 44050, 40623,\n 38434, 23338, 2446, 30669, 34852, 39614, 47142, 10203, 8556,\n 4046, 
23997, 4100, 39125, 24193, 32948, 32196, 17113, 43444,\n 29660, 22666, 37065, 34727, 9794, 19624, 36574, 47779, 39147,\n 48792, 45669, 19296, 6828, 18995, 47232, 8854, 17460, 35137,\n 6399, 113, 14032, 29217, 44047, 31266, 28651, 38293, 14475,\n 18926, 41538, 12386, 23211, 25776, 38895, 736, 7306, 47043,\n 35397, 23781, 47461, 25079, 33887, 51348, 3120, 43419, 26864,\n 6442, 6095, 34020, 2130, 989, 32119, 7703, 31659, 8614,\n 1713, 45045, 40302, 51545, 27995, 52289, 30795, 41063, 44973,\n 52359, 34272, 31630, 32898, 19116, 36831, 38491, 35802, 507,\n 48676, 12246, 29004, 51701, 29031, 6663, 18363, 39548, 4173,\n 8898, 7769, 7745, 41439, 32157, 11945, 21524, 14345, 41779,\n 46015, 21477, 49856, 28084, 33779, 29460, 47072, 39651, 35971,\n 29222, 40177, 43007, 31309, 14374, 21326, 2553, 44187, 35870,\n 37754, 31077, 45403, 41271, 45285, 36589, 19968, 48394, 18042,\n 15779, 24558, 18764, 14500, 14092, 37413, 16601, 19279, 45301,\n 16158, 11838, 43162, 27725, 497, 21395, 18293, 34509, 12351,\n 43280, 45122, 33818, 37225, 24749, 17783, 44229, 30653, 27789,\n 1938, 49008, 1236, 48627, 28531, 48133, 2761, 52082, 19574,\n 29524, 27577, 45675, 23663, 25783, 42968, 47951, 32306, 27645,\n 4265, 22233, 2562, 16429, 7546, 41284, 29346, 18520, 47293,\n 16038, 43159, 31896, 6192, 23993, 35969, 11712, 2771, 3866,\n 19455, 23877, 17532, 39795, 44383, 38157, 230, 4024, 98,\n 2650, 32635, 33860, 23317, 22865, 36587, 32703, 41551, 46539,\n 1519, 9779, 33637, 32940, 19733, 22201, 36132, 38706, 35628,\n 45093, 41347, 10132, 12190, 3, 38932, 8963, 44792, 32920,\n 45152, 321, 9708, 19623, 51790, 22379, 18963, 52039, 36935,\n 45886, 1961, 44910, 25327, 27129, 5517, 34568, 23109, 6548,\n 5207, 26954, 50843, 23360, 43169, 34731, 22636, 48210, 4323,\n 31685, 49307, 43248, 2097, 22723, 43090, 9863, 36141, 51105,\n 9003, 31033, 4487, 51992, 20665, 27545, 41500, 42803, 10115,\n 31396, 17990, 51625, 24389, 18964, 42491, 9107, 31589, 15324,\n 27431, 18333, 14268, 26798, 15820, 42930, 2178, 39743, 28264,\n 10396, 49143, 18006, 19065, 31135, 27916, 17198, 7214, 13301,\n 18271, 25494, 18971, 17139, 47146, 41096, 41368, 13603, 35709,\n 34402, 49429, 33701, 43485, 21573, 36714, 49371, 34732, 24263,\n 37053, 46135, 23696, 1526, 13229, 11805, 20882, 13468, 35032,\n 33288, 33298, 45500, 50971, 20359, 36086, 17299, 2637, 28990,\n 3301, 3199, 29846, 40182, 12157, 18266, 17700, 49440, 9200,\n 37775, 31965, 30508, 18587, 19151, 4171, 30258, 16875, 43914,\n 44794, 17187, 41763, 21673, 34186, 33979, 33631, 40858, 1899,\n 10590, 1256, 29422, 24554, 47837, 8407, 42291, 51215, 24368,\n 7127, 11449, 5842, 46095, 40165, 15213, 25726, 21693, 14467,\n 28192, 1864, 45033, 27175, 322, 17106, 40894, 246, 1049,\n 49735, 33910, 36537, 23754, 51174, 17899, 2778, 47131, 35418,\n 603, 27237, 38612, 24807, 21346, 7351, 42096, 25052, 20659,\n 47175, 1458, 51696, 707, 51981, 43119, 25914, 15263, 34879,\n 19863, 18089, 20851, 5611, 51479, 552, 4945, 43689, 21225,\n 19106, 44772, 50975, 8449, 6797, 39870, 8668, 25453, 28580,\n 24288, 51870, 12043, 18407, 32425, 30338, 21874, 13097, 31036,\n 36012, 22220, 26679, 10994, 47279, 36370, 44507, 32037, 17882,\n 33171, 18771, 48566, 33935, 24874, 28190, 40847, 8264, 23135,\n 48103, 6258, 38244, 3689, 42020, 21932, 10969, 8378, 21112,\n 39029, 20803, 13215, 35879, 50597, 19263, 17985, 32954, 8325,\n 22630, 9920, 38622, 24980, 1811, 16656, 42587, 32625, 50643,\n 36542, 23343, 21311, 48795, 41004, 31835, 40197, 25384, 3555,\n 23454, 36619, 39344, 20890, 29716, 24854, 25071, 41266, 31730,\n 7566, 14323, 36023, 3999, 
50807, 40983, 283, 24002, 1690,\n 7041, 48392, 21681, 28615, 13487, 51863, 5602, 29310, 48548,\n 28115, 30562, 45694, 19793, 40708, 11826, 24636, 16236, 46131,\n 6034, 29569, 25803, 18763, 6320, 12216, 9079, 42557, 13994,\n 15863, 37827, 1973, 30133, 4943, 2691, 33230, 4036, 33159,\n 10911, 5952, 50775, 13179, 20567, 4782, 22660, 48079, 36020,\n 35785, 5391, 29266, 29129, 24524, 25158, 1697, 1843, 50594,\n 46719, 47810, 36075, 48369, 18538, 6097, 2172, 3720, 36295,\n 33705, 21381, 12352, 11229, 50328, 26548, 16686, 4266, 20395,\n 11457, 38312, 24461, 47324, 7859, 17265, 51731, 45131, 29948,\n 25929, 48912, 1223, 49108, 24380, 30821, 9294, 7131, 32562,\n 1117, 33943, 3290, 22879, 4997, 31207, 33703, 23, 25033,\n 47797, 15247, 1826, 31538, 30093, 12076, 7410, 49472, 25356,\n 46526, 14713, 32465, 12590, 37071, 46115, 50958, 40605, 23979,\n 8051, 44851, 45426, 10673, 51878, 21128, 15545, 49847, 34965,\n 32553, 42668, 15476, 21520, 26519, 3240, 20770, 48000, 5483,\n 23558, 46352, 7346, 2022, 23488, 31804, 2067, 29083, 37791,\n 2431, 40597, 32646, 6782, 42446, 15680, 11822, 2790, 37914,\n 47442, 7614, 201, 7646, 32460, 22321, 26813, 8566, 22427,\n 32298, 42022, 44992, 20733, 7634, 19614, 39478, 39388, 26758,\n 11564, 24395, 428, 12671, 34644, 1470, 40167, 20804, 49419,\n 38562, 16096, 44470, 10094, 15465, 38771, 24118, 3178, 94,\n 3878, 44740, 30703, 37692, 11623, 16606, 9741, 39360, 42425,\n 33751, 16521, 47479, 25727, 45706, 9223, 21813, 29015, 20041,\n 15813, 12954, 25385, 692, 43050, 33580, 27657, 33106, 50461,\n 7238, 9366, 35559, 39941, 29441, 13253, 22145, 49506, 21154,\n 23429, 51652, 2668, 5098, 20624, 46996, 46706, 26062, 43227,\n 8185, 39285, 30953, 4567, 21200, 37308, 7241, 26475, 42049,\n 15548, 28600, 24528, 15887, 11899, 2119, 43618, 42959, 30933,\n 14834, 38068, 12542, 7448, 23491, 31621, 27618, 28485, 47706,\n 27633, 27845, 20879, 41853, 32073, 1898, 31041, 1461, 3089,\n 31221, 41915, 37436, 42232, 3512, 47480, 25750, 6220, 19022,\n 5068, 47891, 44374, 31093, 7182, 18110, 24935, 17574, 13172,\n 19347, 46060, 1585, 29027, 18297, 7623, 41240, 30255, 4142,\n 38662, 1612, 23724, 35698, 26705, 29309, 50477, 47623, 24691,\n 3045, 33428, 20798, 35729, 9799, 16387, 39006, 28478, 1939,\n 10628, 49176, 45316, 25825, 19930, 6254, 16998, 51865, 37805,\n 31810, 34850, 13404, 46471, 14424, 33898, 24827, 27391, 44119,\n 41186, 27668, 43388, 45212, 23538, 15132, 4274, 44144, 30615,\n 28417, 12654, 22471, 1910, 18351, 19203, 17349, 41279, 41954,\n 11183, 32850, 39170, 26328, 49687, 50158, 30620, 16726, 38547,\n 14682, 18583, 30317, 32534, 28666, 33938, 5706, 41540, 49339,\n 5123, 37001, 45550, 30108, 50401, 11508, 42208, 49049, 14028,\n 1883, 13541, 4365, 36724, 44106, 20861, 40784, 9661, 15391,\n 23191, 10401, 18737, 50657, 24873]), 'D': array([26446, 45858, 40182, 20057, 54211, 41004, 42798, 37409, 23209,\n 58102, 14868, 28300, 15902, 35966, 3258, 34931, 23099, 34147,\n 55277, 36750, 21686, 6954, 32087, 37653, 42674, 8420, 46481,\n 54591, 44811, 58730, 50749, 21047, 42743, 55892, 23069, 19402,\n 19978, 29223, 43364, 14918, 2854, 54854, 46976, 3038, 11352,\n 35038, 162, 31207, 9872, 14252, 11248, 14903, 52426, 7836,\n 42988, 15936, 4478, 47720, 32472, 30432, 24661, 43217, 9897,\n 46653, 48322, 55260, 21744, 43867, 54420, 19159, 51387, 48082,\n 37604, 17950, 7068, 8394, 27458, 6210, 23005, 58103, 31439,\n 24165, 34548, 36013, 37189, 34605, 31509, 14774, 41289, 55863,\n 41594, 42673, 50629, 56032, 25367, 44588, 33823, 44150, 7374,\n 4688, 37451, 57670, 35561, 54662, 38567, 35732, 58535, 16472,\n 
43142, 18623, 34138, 8201, 6560, 38067, 4170, 41442, 54528,\n 46262, 57939, 11660, 35924, 28785, 59671, 34914, 46267, 3409,\n 47462, 12732, 58949, 6256, 48162, 58963, 22602, 9931, 43294,\n 57190, 52349, 54502, 11881, 15821, 22446, 39, 8479, 5878,\n 23092, 27583, 13415, 33060, 55634, 56206, 34983, 31686, 54082,\n 26495, 41963, 31781, 52707, 36719, 27065, 60350, 44145, 22041,\n 13906, 19971, 28244, 28120, 36458, 27167, 977, 57004, 11000,\n 38627, 2268, 59041, 53555, 43008, 30034, 33248, 9465, 3445,\n 55933, 30712, 7189, 51266, 5468, 53246, 23882, 60216, 24012,\n 47341, 26413, 46769, 4216, 31161, 41640, 17376, 7598, 44847,\n 5075, 19101, 323, 36418, 40487, 23444, 7898, 20644, 25115,\n 28174, 57837, 36117, 58157, 1482, 31775, 31245, 56760, 32755,\n 26619, 6108, 40707, 1937, 48720, 12142, 10176, 39084, 48841,\n 16538, 38943, 40301, 59307, 36649, 16192, 41303, 49419, 59331,\n 43249, 9938, 47407, 38562, 58104, 26242, 21425, 30943, 58854,\n 9231, 49518, 9810, 11113, 29814, 17277, 2934, 59045, 11344,\n 8253, 11530, 34261, 470, 4730, 24073, 47286, 39101, 11853,\n 45817, 28236, 14563, 28866, 27400, 26559, 18784, 31289, 35620,\n 10927, 42426, 28710, 52221, 52257, 29320, 16742, 31105, 52757,\n 57000, 54513, 45991, 50026, 40699, 33152, 32808, 56811, 53961,\n 897, 54760, 2162, 2997, 23330, 52553, 51749, 25096, 1771,\n 55142, 20633, 4950, 39798, 27836, 32123, 48612, 30201, 21916,\n 19305, 3466, 38142, 16554, 15282, 17903, 21294, 2445, 16670,\n 15582, 47183, 50578, 54185, 52818, 58491, 15358, 27670, 13347,\n 59265, 29006, 25406, 36655, 30252, 58883, 12924, 38700, 35848,\n 5276, 41878, 23649, 58106, 37394, 32260, 43136, 31641, 56672,\n 21108, 55447, 58583, 36611, 50813, 48115, 32300, 24641, 46607,\n 4888, 23816, 18695, 12468, 17252, 57155, 3291, 26758, 11598,\n 41081, 26546, 50780, 24601, 36864, 37853, 40592, 30649, 60496,\n 55383, 13887, 57669, 37822, 38669, 32872, 5405, 15797, 14999,\n 11947, 60732, 34113, 33133, 41316, 2721, 59714, 9119, 31776,\n 25855, 18513, 26910, 58138, 1904, 8117, 57609, 59597, 29014,\n 55496, 44829, 42841, 41117, 6130, 58377, 27369, 50327, 26326,\n 26111, 27273, 41869, 54252, 31396, 47006, 27216, 31649, 48457,\n 43676, 49563, 47265, 14866, 30353, 52596, 15174, 5094, 9654,\n 18621, 28368, 7772, 16898, 48512, 22700, 1244, 7105, 59373,\n 39698, 3686, 16868, 10921, 8702, 18582, 46100, 31552, 45927,\n 26026, 2752, 35862, 45127, 27645, 25306, 26698, 21722, 37178,\n 5996, 55258, 36136, 57534, 20499, 38640, 46463, 13945, 16164,\n 53597, 52313, 31242, 56467, 18687, 15820, 7331, 50092, 3134,\n 12698, 21625, 60590, 32912, 20902, 34984, 50070, 29168, 36936,\n 47806, 8809, 30259, 49234, 15352, 53, 24728, 31954, 57857,\n 27759, 6892, 27424, 2679, 32605, 2979, 11655, 25687, 44281,\n 57916, 54210, 13099, 36550, 26672, 1372, 51483, 18583, 58113,\n 27386, 40226, 56564, 35972, 37534, 3651, 19643, 36340, 28783,\n 51412, 11004, 32596, 10124, 49761, 36480, 58162, 47770, 49204,\n 15460, 42664, 41707, 31479, 33353, 12707, 4341, 24314, 9435,\n 14240, 59986, 42305, 17291, 21226, 26629, 32048, 3628, 7102,\n 19485, 1316, 9921, 57017, 17110, 50971, 43857, 17329, 26208,\n 10181, 46535, 6085, 15322, 19882, 60283, 25198, 20905, 58991,\n 59549, 3338, 8903, 54852, 50829, 16134, 58792, 14113, 44113,\n 37457, 22442, 39826, 7, 20353, 31806, 29909, 41209, 55825,\n 2505, 49182, 31647, 38409, 48186, 33700, 3535, 22192, 44856,\n 10129, 32910, 26489, 58224, 18409, 55643, 33958, 4579, 53834,\n 9032, 15942, 29097, 53926, 12126, 33582, 46128, 50765, 9892,\n 34904, 57053, 48849, 29291, 54558, 29835, 58717, 42749, 21215,\n 
7643, 14633, 24072, 33961, 4761, 52153, 49543, 8946, 45996,\n 25385, 37336, 42108, 11886, 35104, 46921, 37173, 57436, 31405,\n 37295, 57343, 29914, 3685, 44089, 1201, 8509, 57393, 32192,\n 58937, 59022, 21001, 16609, 8116, 14622, 39161, 17320, 862,\n 34932, 42631, 53572, 8147, 28076, 48349, 49098, 34319, 29101,\n 23177, 17083, 57329, 44476, 50170, 27712, 2290, 26125, 32685,\n 57826, 7132, 31379, 54339, 22112, 45352, 53836, 56577, 31673,\n 25013, 8427, 18768, 34912, 58404, 11507, 20304, 33137, 57102,\n 28671, 2951, 49709, 26274, 28332, 4313, 7570, 16854, 45302,\n 10625, 1858, 33687, 7954, 58186, 13464, 31810, 15579, 53223,\n 50404, 47649, 49200, 52763, 23778, 22591, 15674, 10947, 1889,\n 25295, 50653, 7297, 23720, 43305, 7781, 23376, 29577, 6925,\n 3001, 32989, 24491, 57715, 388, 1073, 27792, 12085, 35565,\n 19673, 14273, 23820, 9405, 14450, 15711, 54425, 28470, 19187,\n 27294, 10779, 1799, 54067, 16218, 1611, 32927, 30923, 18797,\n 2800, 3580, 6127, 47647, 34806, 52752, 14887, 51304, 34998,\n 36180, 56273, 3798, 30611, 13946, 52728, 59756, 38369, 13767,\n 1489, 43853, 13304, 2650, 41573, 13563, 16838, 41499, 33200,\n 43734, 15867, 52979, 59917, 42722, 58252, 44929, 45000, 43010,\n 37833, 8449, 24595, 46357, 41847, 52713, 50533, 46362, 171,\n 45338, 18267, 12434, 15223, 14587, 41516, 29989, 49082, 42949,\n 1054, 7890, 55300, 57293, 5916, 40929, 51121, 33448, 33667,\n 22968, 17777, 32144, 12708, 31222, 57735, 37616, 48197, 30320,\n 33743, 4445, 51500, 22850, 44427, 31665, 47351, 14437, 45554,\n 59014, 57060, 9631, 37435, 4178, 55606, 37792, 31347, 23269,\n 33082, 23931, 2288, 52847, 44170, 24287, 21974, 13213, 57208,\n 21775, 21184, 7959, 31797, 32456, 5160, 54725, 35137, 18103,\n 234, 23728, 18450, 7937, 58465, 7382, 923, 59359, 13435,\n 11524, 28672, 30367, 33231, 31496, 53197, 40984, 41471, 42706,\n 33030, 49121, 10228, 12563, 46943, 31303, 47835, 6738, 832,\n 60601, 5202, 60151, 18476, 35962, 42911, 41823, 42885, 2158,\n 9749, 43202, 42038, 25930, 8676, 5281, 55380, 37001, 9914,\n 53628, 49448, 41628, 49971, 15337, 45360, 24931, 53881, 50676,\n 24526, 58669, 26895, 55052, 36615, 38252, 13395, 60455, 14027,\n 44758, 54043, 57882, 58797, 45889, 52195, 57450, 46067, 30364,\n 6494, 53813, 8847, 55333, 47422, 46791, 15967, 45547, 20367,\n 23854, 42800, 43824, 59590, 52053, 51434, 6398, 4519, 16894,\n 58959, 42243, 3688, 51565, 32968, 31569, 14099, 58544, 40227,\n 36755, 1976, 29596, 7593, 41428, 27473, 33057, 34216, 29341,\n 18910, 22579, 12644, 17755, 6721, 27663, 31466, 50262, 45861,\n 24403, 40188, 33022, 1455, 34367, 25185, 9760, 37506, 37386,\n 26598, 32987, 2381, 13003, 26515, 59079, 9412, 47089, 20745,\n 43665, 25199, 11644, 23117, 11241, 34273, 45473, 35846, 14291,\n 41971, 37849, 45735, 19995, 33526, 44699, 27118, 14442, 40389,\n 23526, 45239, 28571, 10655, 28088, 48665, 44692, 46752, 60504,\n 20870, 212, 31548, 11988, 6632, 22015, 21695, 15168, 14422,\n 18030, 29833, 15444, 32141, 12521, 31028, 17243, 25638, 17158,\n 45320, 4969, 9448, 8861, 36468, 1631, 53010, 14464, 44921,\n 8286, 29248, 9096, 15685, 48392, 29007, 26828, 32601, 8341,\n 49573, 8277, 25465, 5518, 27717, 59231, 14527, 30862, 36363,\n 48715, 8400, 10463, 8466, 7693, 54938, 22216, 44912, 10989,\n 10212, 35566, 55228, 32096, 51323, 53473, 32827, 41234, 31020,\n 12193, 52672, 35958, 46327, 45899, 20662, 31904, 43327, 44670,\n 5865, 55049, 23466, 11048, 29331, 35143, 26436, 47477, 49975,\n 19566, 3007, 1836, 56200, 45558, 4960, 23508, 48594, 24406,\n 24634, 42686, 19282, 37742, 36321, 8083, 2387, 15701, 419,\n 54572, 
24817, 47837, 22116, 28855, 4205, 39982, 25071, 30119,\n 7621, 761, 8919, 38509, 27066, 29756, 10255, 524, 31029,\n 34719, 23319, 2640, 21521, 9992, 9733, 26591, 29793, 54490,\n 53530, 24229, 53949, 28556, 38556, 26069, 54438, 2091, 45333,\n 50534, 3619, 25288, 30362, 50779, 26786, 51195, 45450, 2432,\n 17389, 43851, 41006, 6232, 19731, 48807, 55566, 26031, 27102,\n 47225, 44831, 2877, 38281, 60143, 14553, 50480, 25201, 9282,\n 16891, 52198, 41096, 30537, 43687, 13627, 40802, 15507, 6786,\n 37794, 10706, 23833, 20912, 30029, 7560, 27269, 43681, 7351,\n 43569, 14465, 8272, 40753, 28563, 57593, 7752, 9453, 13466,\n 15580, 32081, 27595, 50289, 20646, 50603, 26714, 42488, 13742,\n 41980, 9079, 24513, 4082, 15377, 35736, 4420, 29523, 12685,\n 14002, 8258, 7094, 44293, 49330, 4198, 51061, 3915, 22797,\n 31950, 41206, 12453, 23842, 28468, 45514, 22033, 1643, 26856,\n 14070, 29967, 5023, 54446, 7943, 11317, 30873, 43165, 30286,\n 44303, 53587, 36233, 52647, 19502, 14917, 29682, 16659, 18741,\n 845, 3254, 29034, 3335, 16422, 24142, 43709, 39372, 46339,\n 37509, 2226, 34675, 44185, 56447, 5474, 20903, 56433, 53464,\n 33492, 36949, 55463, 32389, 54874, 17955, 25560, 24413, 41392,\n 20732, 50387, 59044, 8584, 775, 26121, 58918, 55108, 30695,\n 38811, 55657, 33429, 18950, 4005, 25639, 32217, 27175, 44592,\n 20930, 36300, 219, 4685, 15663, 30534, 32073, 11016, 4145,\n 38725, 25656, 49021, 56774, 14540, 3438, 11130, 34043, 35048,\n 25768, 2849, 17589, 43596, 48714, 56053, 28269, 8178, 32878,\n 52019, 15762, 17925, 15004, 34305, 49081, 31428, 1012, 34454,\n 53034, 40249, 9700, 5217, 2673, 17326, 47131, 15414, 60676,\n 59012, 7176, 39955, 48206, 30609, 1975, 20723, 20675, 59011,\n 329, 6334, 16299, 28655, 4288, 3249, 26024, 2904, 23240,\n 14831, 44007, 54707, 10959, 50291, 47665, 39282, 3606, 20931,\n 43273, 7879, 48149, 20533, 26839, 29200, 52814, 20359, 24604,\n 39090, 53145, 2546, 22290, 19193, 43802, 27718, 7277, 54176,\n 54166, 57533, 43954, 25730, 2424, 2458, 22210, 51256, 11567,\n 10796, 43944, 44101, 35539, 57576, 56334, 21676, 12964, 27068,\n 20654, 2499, 50486, 45949, 13881, 39016, 49139, 45343, 45161,\n 29987, 58050, 34356, 46218, 9815, 20778, 60144, 15856, 59103,\n 53755, 59250, 15713, 1902, 50157, 6865, 43831, 26033, 52210,\n 44781, 41068, 25015, 44559, 13121, 29642, 49086, 11171, 4937,\n 55342, 59609, 51701, 20077, 26683, 31134, 56338, 28438, 10583,\n 41611, 1639, 22408, 23907, 60650, 54554, 48986, 22678, 9094,\n 23976, 59492, 28868, 45897, 183, 45178, 25340, 50832, 29432,\n 49812, 44357, 38195, 17335, 55720, 45167, 4238, 32725, 50594,\n 54514, 17, 50165, 18733, 54790, 38161, 13736, 46960, 41690,\n 33312, 53497, 40846, 444, 17075, 49656, 36446, 48723, 37704,\n 5030, 41472, 1122, 54933, 2238, 18686, 30982, 35901, 36886,\n 29649, 38300, 44215, 25548, 39907, 38292, 21421, 39417, 37906,\n 59888, 38967, 1295, 43614, 37487, 12059, 19719, 22207, 14156,\n 39612, 31935, 46154, 35776, 54983, 4936, 41637, 56961, 5634,\n 27309, 20027, 56106, 55473, 11093, 29012, 31928, 27611, 12386,\n 40150, 18885, 17510, 1208, 55431, 57217, 27882, 58039, 3648,\n 13474, 19638, 9108, 20722, 24333, 10229, 33746, 57472, 8551,\n 29661, 13651, 17292, 14004, 6798, 48924, 10147, 812, 19437,\n 57014, 6952, 37363, 49864, 12795, 47432, 27847, 14617, 19596,\n 18505, 12993, 26888, 46451, 21541, 1930, 36279, 13060, 38848,\n 43046, 23650, 28918, 3955, 18677, 18714, 10639, 6382, 47505,\n 40828, 12736, 36487, 27488, 20647, 820, 12345, 4983, 42645,\n 12921, 49634, 40545, 49455, 11866, 40748, 36674, 50694, 33047,\n 58338, 24312, 2122, 
24610, 51172, 3633, 35544, 9708, 48763,\n 317, 42891, 9546, 22823, 49435, 10451, 33549, 12220, 42352,\n 31709, 44915, 30540, 58225, 34725, 53854, 15798, 47125, 48779,\n 37357, 50053, 44147, 52081, 39296, 26773, 49948, 25834, 17846,\n 13362, 3261, 11735, 37293, 29344, 48329, 26733, 52873, 243,\n 4558, 46155, 28873, 43651, 34734, 14073, 39108, 29576, 55986,\n 29187, 12507, 46329, 16205, 16269, 21206, 56709, 9796, 4870,\n 39937, 45208, 7169, 47416, 41879, 51443, 45909, 26539, 20166,\n 10848, 53784, 43087, 32875, 23913, 18403, 4673, 41780, 47384,\n 18045, 53147, 43778, 10855, 6881, 14932, 31701, 33261, 52298,\n 1097, 11838, 10419, 32614, 59132, 16686, 51284, 47278, 34529,\n 8673, 3207, 32484, 41403, 55591, 34020, 26696, 39200, 38071,\n 37308, 6066, 41512, 8399, 20825, 27487, 2248, 55712, 23806,\n 28753, 35148, 41451, 14091, 42447, 48575, 29511, 47222, 1124,\n 47702, 22557, 13431, 8741, 18448, 7087, 19715, 40078, 35466,\n 27951, 31088, 3498, 25379, 188, 57315, 17294, 14355, 2308,\n 25727, 24171, 55110, 119, 29321, 30095, 28580, 1700, 59864,\n 33008, 43948, 42511, 36762, 52348, 31798, 59749, 45833, 41837,\n 30469, 7426, 9272, 37568, 41318, 60341, 27756, 54051, 50939,\n 9258, 54611, 24022, 27486, 6440, 48129, 9004, 31325, 25343,\n 47059, 49447, 42101, 42355, 1447, 15811, 17059, 2565, 5398,\n 26008, 17676, 7888, 38844, 55523, 21470, 52129, 22961, 58516,\n 44092, 37705, 48646, 16155, 49618, 54278, 14311, 41784, 53139,\n 51335, 48961, 459, 54501, 19768, 19162, 10123, 1220, 24103,\n 1834, 11956, 43490, 43246, 14566, 21465, 14185, 16279, 49929,\n 8022, 56683, 29678, 37239, 21806, 4426, 49147, 44328, 10454,\n 29345, 5048, 18671, 59642, 35336, 46073, 19074, 40266, 8704,\n 28371, 961, 41220, 27179, 29717, 22515, 5956, 6815, 26779,\n 33449, 59877, 48746, 11863, 15409, 27804, 51964, 53429, 34199,\n 6251, 54122, 7576, 16540, 9018, 52181, 2259, 3728, 44072,\n 27296, 53847, 15783, 57800, 22856, 9046, 29560, 38991, 1826,\n 19494, 19261, 40022, 57481, 27280, 24392, 19989, 43632, 25398,\n 15217, 15362, 14756, 22987, 40244, 36555, 4091, 45575, 5668,\n 15144, 49408, 6467, 18494, 58828, 7680, 53131, 60599, 48997,\n 42430, 9226, 56507, 30983, 58751, 18049, 54463, 49620, 8489,\n 42085, 31306, 51459, 41896, 22365, 53492, 21885, 45841, 7516,\n 40041, 44798, 32789, 34533, 58856, 33485, 18056, 4758, 9633,\n 6176, 39046, 49034, 42662, 47921, 10786, 28408, 27793, 57091,\n 38182, 3693, 21831, 14220, 19498, 49931, 49772, 58938, 52158,\n 28547, 18756, 53972, 7456, 24598, 27204, 30238, 25582, 9571,\n 57312, 10793, 55259, 37632, 11526, 3150, 29412, 10513, 20537,\n 24170, 41817, 45480, 28194, 58099, 7878, 34888, 51543, 4253,\n 25433, 56336, 40141, 17085, 37747, 57968, 39738, 40348, 55384,\n 15511, 59176, 27404, 37202, 52836, 25407, 35289, 28344, 38249,\n 30880, 60040, 50890, 52579, 49492, 33894, 15134, 25173, 23727,\n 35197, 39117, 26706, 5885, 45863, 49313, 47524, 14589, 6488,\n 47164, 46595, 43260, 3726, 40669, 16682, 16090, 26509, 31679,\n 619, 45236, 3456, 21363, 9999, 14216, 1215, 14058, 10654,\n 59226, 56126, 31001, 10798, 59334, 45834, 10154, 19527, 38206,\n 33387, 35128, 22376, 27855, 40460, 14190, 25901, 25455, 42945,\n 60296, 34253, 10449, 36064, 30830, 57951, 47956, 55666, 19043,\n 13751, 47289, 51176, 58880, 11642, 56875, 5441, 27641, 26032,\n 48799, 45039, 4677, 53360, 17888, 18212, 40243, 15265, 38623,\n 52060, 37717, 58575, 25151, 17438, 47819, 7980, 45746, 52956,\n 53739, 28029, 17405, 7495, 16630, 13108, 40008, 14119, 11889,\n 24412, 3216, 36124, 18541, 34201, 53558, 24516, 56743, 11921,\n 18991, 56994, 60486, 
10854, 53504, 38703, 57249, 42807, 38490,\n 1217, 9816, 9232, 21008, 36533, 21527, 15311, 44402, 43445,\n 34847, 24338, 49049, 7192, 33454, 49528, 39766, 23365, 37063,\n 32104, 24702, 5149, 24951, 20547, 14796, 5134, 16935, 21173,\n 14249, 52177, 44685, 42368, 42857, 35894, 1477, 28589, 21652,\n 36361, 20858, 21759, 8304, 57234, 29982, 24994, 33402, 31359,\n 55546, 763, 47975, 24336, 20102, 59654, 51147, 8935, 58753,\n 28000, 22967, 52726, 9033, 58848, 20258, 15411, 53934, 49663,\n 11430, 55642, 30, 4982, 25644, 32205, 21393, 45512, 33226,\n 9769, 37730, 23356, 45266, 55206, 23964, 37674, 9069, 44681,\n 35872, 10136, 29972, 48313, 22021, 36961, 25910, 5366, 31459,\n 48131, 58199, 16584, 59505, 12114, 34566, 7588, 31867, 36130,\n 18566, 27707, 2046, 13274, 15962, 54720, 51033, 47069, 16734,\n 1562, 585, 45060, 30376, 27587, 18481, 7649, 48084, 1831,\n 27413, 24840, 58155, 9201, 671, 35363, 45394, 4240, 30526,\n 44872, 22128, 47801, 50019, 20819, 46720, 23878, 57664, 51517,\n 29805, 12072, 55275, 28195, 60220, 25762, 11540, 16378, 48324,\n 12769, 54317, 43747, 3800, 21914, 16381, 43413, 44444, 10771,\n 30172, 33504, 30065, 12629, 41144, 15791, 28914, 54845, 11869,\n 35969, 34835, 609, 18465, 47273, 10086, 16260, 11140, 13792,\n 1286, 46048, 10442, 18266, 17411, 5466, 7491, 28990, 12494,\n 26643, 53310, 1603, 28020, 56630, 43389, 29522, 55810, 28124,\n 38942, 34699, 44066, 48982, 7751, 53537, 4035, 53878, 16280,\n 47578, 52764, 28150, 3814, 52717, 25220, 57717, 39981, 42360,\n 45838, 3177, 6785, 23557, 2390, 48653, 43358, 28558, 7217,\n 19916, 39215, 4403, 51791, 36009, 19435, 13480, 993, 19738,\n 23464, 9606, 26412, 10972, 1960, 6404, 27965, 21036, 2242,\n 44979, 18292, 11479, 53088, 9347, 30393, 31198, 33172, 10622,\n 26703, 9521, 51905, 21121, 15476, 38860, 8171, 51097, 12343,\n 39843, 11808, 2462, 14226, 33388, 51091, 30390, 32352, 56651,\n 15356, 54050, 27767, 35706, 60320, 18471, 24543, 5820, 14769,\n 50837, 36904, 3349, 56086, 22306, 23097, 52311, 32309, 3629,\n 27604, 27658, 713, 27846, 7876, 52018, 46324, 56834, 40503,\n 31564, 8082, 18638, 14116, 24274, 35529, 35270, 41619, 9012,\n 22601, 15218, 36632, 20605, 50517, 21687, 13074, 38899, 16626,\n 59175, 16952, 29375, 55026, 47934, 51231, 35914, 37398, 7965,\n 12351, 39750, 2293, 57366, 22819, 42796, 11865, 36039, 23535,\n 40991, 40602, 57414, 31566, 21818, 44294, 68, 8282, 4506,\n 44127, 33053, 45724, 3037, 8468, 24206, 20004, 30063, 55483,\n 686, 43420, 25274, 9966, 59372, 59163, 37783, 49300, 28635,\n 37536, 53405, 42916, 11099, 23379, 50821, 3710, 40, 10232,\n 52457, 47708, 25392, 59136, 8461, 56658, 11611, 14993, 43673,\n 41561, 38082, 19250, 19267, 1289, 36720, 39788, 59335, 39885,\n 9386, 538, 56156, 34348, 8086, 46750, 40821, 2830, 50072,\n 10285, 59194, 25995, 43158, 37735, 57724, 10217, 44468, 25871,\n 25961, 58019, 30786, 51182, 52590, 24806, 49293, 58999, 35249,\n 31129, 28910, 10514, 41356, 30541, 36259, 42538, 16449, 56069,\n 51761, 43609, 56567, 18283, 54835, 46615, 31498, 41252, 55641,\n 27439, 29184, 14583, 60331, 40003, 41387, 34919, 44474, 1127,\n 57561, 2130, 1281, 8168, 38498, 57996, 56538, 25154, 14352,\n 51548, 22348, 11311, 9295, 57294, 55880, 13716, 46454, 28105,\n 46083, 33438, 22648, 55577, 41380, 59745, 21630, 50793, 36640,\n 52704, 35402, 23879, 5080, 35673, 44199, 29114, 7232, 29981,\n 58689, 1334, 21096, 46575, 16677, 6810, 31953, 23961, 58233,\n 30853, 44648, 45115, 42168, 47403, 16101, 11051, 1008, 13696,\n 3867, 39951, 44182, 22511, 2832, 16981, 50270, 34309, 57688,\n 6607, 14299, 7556, 2156, 46963, 
5362, 2384, 8188, 14988,\n 8801, 54464, 19717, 12518, 13458, 60150, 41675, 18947, 28902,\n 17464, 60625, 928, 19091, 513, 26654, 55432, 54418, 9509,\n 56730, 54702, 12042, 51771, 16022, 40454, 24679, 37559, 39185,\n 12027, 13323, 50522, 55176, 20661, 9149, 28717, 25647, 37005,\n 397, 46213, 13617, 36107, 15489, 31966, 12905, 39351, 18522,\n 19264, 49768, 10643, 60093, 59774, 29262, 40554, 8687, 26342,\n 21216, 5744, 27805, 41901, 12638, 60530, 49941, 59933, 5854,\n 9724, 1483, 7901, 51535, 43838, 40740, 3644, 10963, 34508,\n 58574, 24972, 134, 60276, 20325, 35170, 10687, 49972, 11627,\n 11794, 26975, 25876, 43502, 3121, 21825, 45094, 25909, 16623,\n 8776, 15773, 42895, 40840, 46268, 38205, 7538, 32610, 22001,\n 31963, 1582, 55931, 51734, 58260, 60224, 15788, 55788, 51429,\n 21170, 55762, 36068, 27429, 29241, 48301, 17387, 19342, 35508,\n 2938, 32395, 11738, 5450, 13582, 59261, 21409, 24818, 45702,\n 26988, 25257, 27497, 49232, 41532, 43016, 24186, 45761, 46326,\n 30413, 57517, 14725, 51344, 30671, 22170, 49470, 39740, 55197,\n 18974, 23350, 37002, 20129, 10856, 51893, 37352, 20139, 27802,\n 55801, 19314, 31417, 22569, 38472, 19201, 22361, 25188, 37358,\n 13442, 26796, 57880, 41160, 16240, 38370, 9008, 21244, 1099,\n 21145, 59478, 7616, 32493, 38983, 5311, 34802, 691, 38451,\n 35457, 51639, 42654, 60514, 5976, 7345, 59720, 11631, 32564,\n 22685, 22801, 13463, 57868, 49673, 59383, 56636, 54357, 17070,\n 30836, 38717, 7678, 60524, 36789, 4050, 9436, 56911, 9078,\n 47109, 54014, 27734, 22222, 13244, 52120, 60028, 52718, 47538,\n 41323, 58756, 16086, 55817, 3924, 20792, 21568, 25375, 47530,\n 9292, 11639, 2620, 56396, 50830, 49861, 35266, 178, 53117,\n 37599, 41449, 390, 43552, 47727, 54992, 54564, 12154, 29595,\n 33487, 529, 56714, 20791, 7698, 40746, 12331, 13491, 49148,\n 24437, 48053, 10550, 22354, 9013, 37155, 4176, 57382, 13382,\n 16121, 22407, 52263, 11717, 43009, 11740, 58624, 19374, 21743,\n 3014, 41792, 47369, 56685, 20810, 45279, 47214, 47919, 51739,\n 36019, 50883, 32852, 22427, 49556, 10995, 43794, 47773, 55123,\n 37459, 53917, 51115, 18984, 40090, 15366, 33092, 16745, 58606,\n 24288, 9552, 15434, 49988, 43805, 15266, 40767, 57782, 32603,\n 41058, 44641, 7714, 9705, 5984, 20341, 58121, 881, 7989,\n 44756, 52222, 24565, 2042, 25756, 28520, 21754, 15922, 57266,\n 43314, 26126, 54046, 25602, 42630, 9026, 38458, 56215, 37426,\n 33801, 45272, 45130, 30495, 60361, 55532, 22715, 14023, 19239,\n 24106, 5002, 34186, 38599, 164, 27143, 54359, 27338, 33719,\n 39122, 21075, 45465, 4683, 47760, 32409, 30103, 7107, 38518,\n 44481, 18593, 40573, 29038, 34117, 6300, 50232, 546, 55101,\n 31675, 4696, 40094, 2449, 24375, 42020, 15298, 28237, 52882,\n 17249, 33123, 13084, 1516, 7037, 29934, 51233, 19515, 49458,\n 52854, 33174, 32078, 26946, 31983, 3839, 55684, 21314, 38675,\n 34222, 3304, 24785, 30800, 23551, 1583, 32492, 23838, 51504,\n 27941, 22668, 49516, 6520, 44422, 11728, 34410, 44131, 10478,\n 10386, 25294, 20588, 57895, 51196, 49322, 2346, 42485, 37125,\n 30904, 58434, 6862, 11009, 44536, 42670, 41973, 30523, 14995,\n 58327, 32889, 42208, 41850, 34571, 47104, 21899, 39073, 45873,\n 42145, 29608, 17671, 21179, 59723, 58522, 22160, 9919, 40410,\n 50701, 9444, 40383, 56064, 49011, 24525, 47981, 49715, 1827,\n 34469, 27683, 42403, 33071, 33347, 34106, 4832, 32740, 12439,\n 47615, 8054, 24898, 33886, 50732, 15994, 9305, 50781, 56860,\n 14315, 3442, 20562, 44818, 38785, 49376, 9687, 50902, 26391,\n 11028, 13757, 16394, 5155, 33386, 13753, 39864, 17948, 12246,\n 6370, 19562, 6319, 52280, 661, 9132, 
60494, 31196, 26862,\n 49298, 7335, 18603, 47896, 3183, 40251, 49763, 50287, 21162,\n 47574, 14703, 28407, 25888, 17304, 49183, 6693, 18534, 25971,\n 34772, 55223, 42867, 1052, 29142, 21624, 19615, 46521, 1265,\n 43564, 49438, 3050, 43767, 31959, 1064, 31232, 6403, 52438,\n 51668, 30691, 25085, 59830, 7414, 32379, 51043, 34604, 2858,\n 270, 28711, 27557, 53655, 59028, 53158, 14663, 46315, 27676,\n 13871, 33586, 18802, 35080, 29713, 18753, 25122, 39169, 5800,\n 41427, 27471, 42416, 27983, 25097, 4287, 20526, 29384, 2155,\n 1672, 5229, 39564, 31525, 54095, 17749, 42546, 45092, 51239,\n 17935, 56600, 46118, 41531, 50171, 18704, 58800, 16874, 76,\n 29022, 46832, 26566, 55948, 49350, 11012, 57188, 6420, 33920,\n 33292, 15262, 10712, 41016, 26599, 14937, 13743, 14202, 54740,\n 10713, 44796, 41542, 40637, 48675, 4689, 22871, 56091, 56560,\n 42776, 1711, 57497, 26955, 51613, 31945, 19986, 23729, 30489,\n 4252, 43456, 8744, 54581, 41658, 9619, 55347, 7727, 8454,\n 60166, 3759, 47386, 59701, 45217, 36335, 28830, 27945, 20170,\n 5705, 19120, 55059, 52592, 14874, 17901, 2674, 6870, 32174,\n 5175, 25043, 15464, 2494, 29770, 3565, 22498, 17082, 20806,\n 13626, 16102, 25528, 47868, 51326, 56131, 44823, 22282, 2035,\n 56866, 20230, 14098, 34038, 22711, 40776, 6988, 4951, 31365,\n 13507, 10925, 46080, 57617, 29637, 3306, 3664, 15357, 1854,\n 2885, 5279, 29146, 28890, 14789, 44229, 18281, 46722, 22200,\n 25277, 38324, 24179, 27087, 54517, 21489, 47644, 5139, 9858,\n 33333, 22571, 53039, 35755, 32874, 26576, 20509, 59895, 3625,\n 29645, 58413, 29339, 28917, 45837, 21095, 45679, 60022, 5219,\n 23661, 15394, 31090, 8791, 32439, 505, 56869, 23584, 28153,\n 27812, 35641, 33963, 4262, 22013, 6502, 53549, 41618, 38386,\n 33415, 28279, 52586, 37341, 17293, 3402, 41201, 42159, 48033,\n 6777, 4052, 59330, 59507, 15595, 27025, 40663, 43643, 14420,\n 19381, 4990, 56285, 48038, 41193, 60657, 35820, 12510, 11342,\n 43597, 1665, 44187, 59271, 51931, 4369, 45584, 4285, 39018,\n 52527, 35612, 31483, 54262, 8632, 58427, 28949, 42392, 4196,\n 48952, 40380, 31838, 34715, 48568, 60417, 29278, 3913, 27738,\n 55295, 59487, 48827, 1888, 34598, 13210, 50554, 26814, 10198,\n 17605, 38461, 4770, 54831, 56172, 46023, 52745, 49130, 13306,\n 48901, 43982, 48829, 60572, 27789, 8518, 52014, 25606, 55746,\n 58909, 24131, 1502, 15972, 42061, 38877, 48366, 37896, 27304,\n 1355, 47087, 30831, 43576, 29700, 51036, 56543, 31896, 14072,\n 49894, 20310, 25880, 40087, 8245, 15861, 43486, 22377, 33500,\n 24129, 14160, 1058, 44495, 58864, 10598, 40199, 16321, 5288,\n 1740, 13577, 30909, 42023, 5344, 28229, 3073, 16262, 33624,\n 24949, 55172, 56108, 54112, 50531, 50936, 7434, 42098, 60048,\n 54653, 23448, 28700, 10787, 25919, 53391, 32982, 34792, 36624,\n 36220, 37412, 51775, 39246, 52366, 25828, 5731, 46314, 10227,\n 55771, 34176, 19934, 49108, 48468, 46046, 36158, 39505, 35748,\n 4260, 39905, 7038, 43284, 43750, 14704, 5881, 7048, 13525,\n 31244, 24342, 29694, 35877, 32420, 50125, 40934, 12075, 38903,\n 9614, 6569, 57822, 54880, 17179, 1797, 14403, 34489, 5837,\n 14518, 492, 31186, 13752, 58819, 22995, 52950, 57546, 46188,\n 14682, 30682, 14726, 12892, 50395, 35302, 24211, 49197, 43073,\n 5879, 24457, 1872, 16597, 39466, 37171, 18585, 6781, 58125,\n 53974, 7910, 10101, 43988, 38795, 54601, 20721, 3449, 44654,\n 36802, 47566, 18515, 34938, 53794, 24896, 56727, 23155, 19983,\n 38209, 36983, 30849, 30984, 58222, 46200, 27870, 41536, 21733,\n 49110, 31377, 25576, 59949, 9562, 41133, 7090, 38743, 56924,\n 26329, 44433, 6531, 48293, 55032, 21165, 31264, 
9323,\n 42342, 46654, 30170, 5271, 19536, 22165, 51054, 40143, 10545,\n 32265, 18727, 8938, 5907, 21960, 25575, 11382, 51937, 36697,\n 46119, 18259, 24548, 17863, 32817, 19876, 13333, 43057, 32290,\n 44396, 39516, 51862, 41188, 40926, 41974, 40764, 49658, 15175,\n 25549, 2822, 1118, 21910, 23765, 35395, 36628, 47425, 1037,\n 18088, 50301, 40794, 8215, 12476, 3947, 25574, 15638, 50036,\n 8493, 32722, 49248, 47173, 46438, 6771, 46057, 50054, 33349,\n 30666, 28379, 17064, 1232, 50186, 6011, 6505, 5004, 47270,\n 22701, 46950, 38197, 21173, 14006, 12401, 15094, 13531, 41762,\n 46386, 18948, 39271, 48026, 45838, 1239, 6095, 27775, 11688,\n 52551, 28569, 44833, 50055, 22249, 15299, 36630, 50440, 25163,\n 5082, 24178, 23840, 15289, 42384, 4044, 37274, 26987, 2445,\n 46495, 48849, 14974, 25048, 7872, 61, 19236, 18863, 23688,\n 14436, 10765, 52634, 21988, 33898, 16744, 14827, 46538, 50452,\n 40117, 17931, 41105, 45704, 47564, 14322, 12244, 24043, 33714,\n 28554, 38620, 16355, 39238, 53665, 45645, 54595, 42108, 24445,\n 10920, 26198, 29430, 47758, 34971, 27033, 5854, 48883, 18363,\n 31207, 28610, 26726, 32197, 54095, 8375, 47742, 33467, 6639,\n 23440, 39668, 51351, 6421, 49014, 24798, 25974, 51945, 4191,\n 40763, 1257, 35925, 2401, 41353, 30859, 43261, 1380, 22302,\n 49813, 39582, 40559, 48605, 49072, 17943, 4650, 6388, 5551,\n 4060, 26641, 32978, 22324, 25818, 41376, 4057, 4515, 21616,\n 41065, 51842, 292, 11354, 41860, 32637, 21572, 6913, 10100,\n 9262, 40510, 129, 17301, 27844, 24755, 31388, 38065, 38654,\n 4409, 12079, 14294, 7498, 36730, 54751, 3990, 21255, 43988,\n 27079, 52356, 30488, 28508, 51922, 30762, 4686, 29059, 7364,\n 20612, 44767, 103, 36915, 46096, 46304, 5610, 2756, 14435,\n 40227, 17514, 43635, 8695, 19136, 5520, 22477, 29727, 6383,\n 28022, 37222, 39606, 31225, 30323, 50161, 13188, 424, 28204,\n 6965, 38990, 35497, 22818, 22850, 43391, 46900, 46389, 36981,\n 51720, 47312, 43506, 7053, 23818, 25219, 50112, 43453, 17096,\n 44395, 37258, 29965, 10433, 43280, 20158, 46883, 27599, 49081,\n 41102, 11147, 45427, 30921, 1252, 47510, 19708, 8128, 8881,\n 26960, 39122, 35490, 24987, 24779, 51693, 37108, 24617, 41069,\n 10074, 21297, 6613, 54661, 27060, 41389, 53756, 20380, 37107,\n 50963, 33967, 54399, 22720, 18121, 53595, 30829, 13487, 24490,\n 54825, 9219, 38935, 36063, 43504, 2428, 55271, 51304, 26111,\n 46076, 7313, 29894, 33039, 46921, 19798, 2150, 5383, 22939,\n 27993, 28104, 4286, 4026, 38593, 21570, 36162, 24540, 53084,\n 2738, 54462, 34540, 32079, 31510, 27051, 44612, 17557, 4591,\n 34969, 20206, 14475, 16535, 38951, 1231, 17399, 26445, 4213,\n 44369, 15897, 38467, 30214, 35609, 35484, 33701, 54675, 10296,\n 41494, 29137, 47996, 43234, 47584, 43645, 35215, 32695, 52565,\n 38552, 35801, 8338, 4837, 12557, 23042, 34243, 35568, 36881,\n 13016, 33981, 32548, 10548, 33651, 16342, 32219, 30796, 31350,\n 25930, 29462, 34325, 588, 42547, 40484, 23201, 38731, 25790,\n 54927, 12729, 51086, 27415, 25544, 34209, 41305, 8190, 19855,\n 4953, 45966, 19458, 7130, 35536, 41540, 46884, 37975, 19153,\n 26889, 45240, 31027, 32538, 52710, 51680, 25739, 40133, 39137,\n 21623, 44590, 44210, 1751, 35260, 24321, 5437, 13953, 38030,\n 42383, 33117, 27726, 44123, 23184, 11229, 36938, 26323, 23128,\n 40258, 11595, 54150, 28188, 2561, 21211, 48628, 26144, 9085,\n 51019, 2787, 54721, 35311, 29492, 16733, 43014, 54390, 5066,\n 52749, 38976, 23666, 21430, 52689, 53388, 39572, 21229, 26086,\n 12168, 29049, 46157, 29529, 8414, 5580, 24814, 51105, 23725,\n 27231, 6511, 34158, 4520, 48039, 49988, 8562, 13126, 
54627,\n 6457, 15077, 44012, 2579, 31554, 14447, 44286, 28138, 46555,\n 18642, 36400, 34944, 50658, 39965, 28757, 30870, 37756, 23963,\n 6234, 4714, 4912, 10022, 2794, 38459, 2948, 2622, 4701,\n 47004, 3461, 55445, 53791, 16065, 43777, 30123, 36425, 7640,\n 49494, 20511, 500, 2982, 44583, 23806, 5075, 16420, 36000,\n 47588, 2198, 41222, 44845, 10162, 15796, 37381, 37238, 50772,\n 12632, 33292, 30348, 36341, 53278, 10244, 32443, 39752, 17896,\n 16432, 4967, 11821, 20752, 49336, 25726, 41215, 267, 43102,\n 37041, 43125, 5529, 1485, 2653, 4624, 13509, 3667, 22828,\n 25289, 7751, 38495, 19541, 46382, 9443, 52417, 39046, 24437,\n 26133, 12548, 33065, 28382, 3151, 12708, 6744, 50348, 53441,\n 45767, 39012, 37781, 41082, 27894, 42974, 31164, 54625, 50008,\n 28530, 24780, 46847, 28546, 45902, 47133, 11155, 27796, 11211,\n 12091, 52543, 4495, 34552, 8080, 7623, 32547, 42993, 33819,\n 45548, 52012, 35509, 18320, 52531, 39727, 49922, 34837, 4180,\n 28886, 50831, 39785, 28758, 6315, 22940, 4475, 49593, 15333,\n 8955, 54045, 40572, 1156, 39843, 52836, 26902, 7063, 4582,\n 40587, 3288, 52754, 47756, 26762, 38507, 36130, 38222, 53804,\n 20810, 53732, 35695, 26781, 314, 50146, 54563, 45381, 29517,\n 6589, 39750, 10849, 52906, 16326, 38307, 41396, 47199, 18778,\n 48106, 49719, 8824, 2955, 42249, 37440, 31380, 33537, 34819,\n 36404, 37492, 40302, 6295, 55700, 11801, 32287, 15304, 26845,\n 15912, 16913, 38751, 7712, 13695, 35583, 18911, 40318, 28656,\n 54582, 40519, 15534, 32741, 12307, 10082, 12542, 34179, 27628,\n 6979, 17736, 21176, 30382, 44351, 44328, 36952, 54799, 53249,\n 50487, 15946, 21273, 5885, 51389, 28128, 32716, 20896, 26903,\n 23468, 14714, 54532, 35873, 19409, 39819, 28714, 10579, 1415,\n 1143, 16992, 6490, 41402, 49586, 22971, 34324, 21442, 8965,\n 21450, 51930, 1159, 33442, 19552, 24784, 8609, 31079, 32788,\n 45093, 50271, 20357, 12900, 7825]), 'Y': array([10060, 13775, 17991, 9102, 12057, 7285, 31115, 9307, 28016,\n 8564, 6681, 6827, 736, 14727, 13274, 19367, 2798, 3509,\n 27856, 29773, 7151, 17338, 27746, 15025, 1153, 7326, 3142,\n 19953, 22929, 14100, 9331, 12485, 29419, 11165, 35296, 16376,\n 5536, 3003, 19060, 235, 32896, 13187, 20692, 15398, 14036,\n 18935, 1030, 10262, 15062, 24116, 15066, 28951, 22886, 22628,\n 1710, 14074, 17224, 19672, 7377, 18447, 25801, 33019, 13114,\n 31605, 15077, 8031, 35409, 24396, 12445, 25550, 23583, 22320,\n 18685, 19424, 26796, 3684, 32844, 9923, 1379, 3168, 7739,\n 6649, 12628, 19477, 18815, 5679, 6994, 4688, 2739, 33257,\n 10624, 25400, 23072, 21266, 26734, 24456, 7013, 2811, 13348,\n 2831, 31539, 6597, 26793, 25201, 24287, 23956, 6332, 18084,\n 24263, 20650, 25218, 10199, 19094, 24916, 7092, 14860, 15867,\n 9572, 4832, 21916, 23931, 35725, 23927, 4249, 16342, 222,\n 25266, 12791, 35201, 35449, 9624, 5518, 31943, 5039, 17503,\n 14988, 33446, 5289, 2747, 15634, 17458, 14772, 34554, 21841,\n 19500, 14134, 3389, 14924, 28379, 7206, 33887, 27595, 19681,\n 11079, 9606, 29390, 15932, 22577, 8554, 26609, 3078, 15035,\n 13323, 7592, 18645, 14854, 25548, 2491, 21626, 30522, 1804,\n 907, 24888, 2340, 28046, 22744, 17841, 15487, 4974, 31417,\n 20562, 6688, 1892, 3313, 16619, 27447, 31329, 5905, 9725,\n 20902, 10845, 15722, 14032, 2065, 17614, 36128, 28284, 11961,\n 13791, 9989, 7319, 19179, 3395, 33551, 18719, 33130, 34878,\n 20317, 28063, 21863, 14406, 13152, 21633, 18951, 800, 4707,\n 17398, 27906, 29401, 24619, 1066, 17678, 24486, 32477, 10469,\n 8099, 27246, 30740, 36076, 35808, 13070, 32212, 20223, 31397,\n 10917, 8117, 20199, 25930, 6961, 5034, 18375, 5668, 
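    def _exec_opponent_play(self, curr_state, prev_state, prev_action):
        '''Ask the opponent policy for a move and apply it.
        
        A minimal sketch: it assumes each make_*_policy factory (imported at
        the top of this file) returns a callable
        policy(curr_state, prev_state, prev_action) -> action, where action is
        a flat board index. There is no resign in Gomoku, so the policy always
        returns a playable move.
        '''
        assert curr_state.color != self.player_color  # must be the opponent's turn
        opponent_action = self.opponent_policy(curr_state, prev_state, prev_action)
        return curr_state.act(opponent_action), opponent_action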
36040,\n 15444, 6147, 15661, 6930, 7181, 4294, 26783, 10467, 13915,\n 33801, 10790, 34858, 13241, 13224, 24485, 1752, 18376, 29059,\n 6643, 4651, 563, 3983, 10850, 5327, 21455, 12931, 36063,\n 35420, 26715, 14876, 18050, 5165, 36096, 2335, 12655, 25734,\n 21873, 652, 6086, 21377, 384, 24086, 11157, 30356, 8794,\n 31672, 28047, 25360, 34739, 14737, 23019, 7470, 29026, 26436,\n 1515, 18760, 889, 229, 30068, 10925, 20188, 27389, 27372,\n 24600, 28000, 6537, 28201, 9086, 32999, 22081, 7224, 9154,\n 9429, 31956, 30762, 4383, 12188, 2069, 7010, 15800, 18161,\n 5996, 29849, 26411, 30234, 31116, 19338, 25035, 11286, 20198,\n 14976, 14971, 694, 6179, 8931, 36106, 26984, 8455, 19921,\n 34136, 24394, 16540, 13974, 24464, 25147, 34192, 34348, 16866,\n 8863, 33719, 17597, 8136, 17218, 9881, 35219, 13642, 1404,\n 11947, 26033, 12859, 8107, 116, 16771, 31124, 26801, 7981,\n 24712, 15725, 22251, 26043, 10759, 15303, 21109, 3016, 26117,\n 15396, 8321, 22554, 7371, 12164, 10762, 5706, 5209, 35702,\n 19464, 1365, 2886, 8010, 26510, 23360, 12527, 7401, 16953,\n 36057, 6852, 18898, 3324, 9147, 25644, 35216, 35528, 23462,\n 14481, 34933, 35988, 18588, 21125, 34049, 15671, 25679, 239,\n 27017, 742, 19468, 29855, 13132, 27111, 22408, 28444, 20293,\n 18220, 25628, 14676, 10196, 29802, 32833, 28526, 5270, 12567,\n 8743, 28763, 21567, 9775, 34576, 13837, 30556, 5558, 34219,\n 5462, 28115, 377, 18853, 29035, 12990, 18767, 32498, 14243,\n 11333, 17363, 1830, 19604, 13506, 33150, 31533, 12441, 24843,\n 26820, 9786, 31601, 10146, 12332, 26948, 6769, 27829, 2265,\n 5331, 18491, 34822, 1255, 25912, 17530, 14437, 15329, 11094,\n 757, 11584, 20421, 17548, 10537, 20710, 13773, 15299, 7917,\n 32799, 32745, 12566, 17594, 5601, 21999, 13868, 15829, 11479,\n 10502, 26840, 17453, 29704, 20376, 17203, 21747, 13548, 19634,\n 8947, 26020, 32660, 29004, 6765, 25700, 36005, 17140, 33222,\n 26485, 18614, 30008, 624, 6825, 15037, 8232, 31203, 16044,\n 13476, 21589, 5153, 7491, 3304, 4071, 10288, 27127, 27850,\n 15411, 17052, 11683, 31563, 11146, 21398, 28856, 19894, 31982,\n 13785, 3608, 34233, 5044, 3244, 24894, 12330, 22114, 34553,\n 16546, 20960, 5075, 33823, 24747, 19834, 11911, 11014, 85,\n 4805, 7148, 27617, 5411, 25424, 17420, 22771, 28329, 33925,\n 32975, 24348, 31312, 14867, 25202, 3938, 22191, 32813, 19177,\n 33039, 2409, 6293, 31508, 18926, 2596, 34651, 28210, 19933,\n 24803, 11066, 24880, 22486, 14795, 2927, 25164, 8808, 12912,\n 20390, 19229, 23887, 24232, 15674, 33278, 15799, 33149, 9145,\n 20812, 13963, 17611, 31708, 6035, 17818, 1790, 17037, 7297,\n 12776, 8271, 34283, 10560, 35946, 5579, 1612, 31325, 21191,\n 28041, 13164, 28301, 33155, 23315, 8274, 13214, 19588, 31394,\n 1924, 31210, 14052, 17247, 27229, 32177, 1825, 3525, 8187,\n 30225, 28543, 19043, 3143, 17350, 8822, 25246, 4074, 1668,\n 8484, 33996, 2360, 6152, 22624, 32040, 21181, 8664, 22895,\n 10161, 580, 5174, 2385, 21809, 7876, 31969, 27668, 21810,\n 10159, 5821, 27979, 5906, 9911, 8735, 18157, 12761, 15288,\n 14229, 15214, 26634, 20808, 18580, 28594, 22797, 17728, 8809,\n 10967, 10866, 26352, 25193, 26703, 10626, 11658, 35276, 11484,\n 28412, 5391, 3659, 13525, 18976, 6612, 3533, 2863, 15225,\n 30951, 6479, 17938, 20907, 17521, 24241, 11237, 33552, 27008,\n 28077, 6453, 4750, 15961, 21500, 15539, 16009, 16504, 23508,\n 34047, 30875, 28044, 22798, 22685, 35527, 15509, 26571, 27044,\n 16998, 2224, 25934, 26168, 34253, 26580, 13338, 18483, 8532,\n 19164, 32, 15756, 29102, 22649, 16826, 28522, 27019, 3896,\n 11423, 31057, 842, 20176, 20835, 36176, 4458, 2498, 
10952,\n 7199, 2144, 24603, 33855, 17644, 27091, 22991, 7051, 6586,\n 17321, 34581, 20438, 3421, 29176, 23443, 35205, 6824, 30497,\n 6785, 6528, 14436, 31033, 18315, 1971, 10902, 31374, 2238,\n 21084, 879, 24530, 7589, 11177, 5132, 28508, 8130, 1791,\n 11318, 33275, 27953, 6905, 28494, 27221, 33930, 3014, 3404,\n 15044, 22407, 2474, 23982, 21684, 14157, 16857, 29699, 31123,\n 26227, 27193, 28836, 10030, 26887, 7305, 17110, 30334, 15717,\n 35767, 5875, 20832, 23378, 17645, 23413, 109, 32237, 7187,\n 22595, 26075, 21632, 13741, 28416, 31448, 5016, 517, 3741,\n 35806, 1130, 21996, 24952, 23034, 6743, 14553, 35043, 12535,\n 16288, 12252, 25363, 36025, 10654, 18894, 12525, 22433, 21903,\n 24674, 25660, 9826, 9947, 9508, 29203, 6692, 54, 9106,\n 227, 13165, 32938, 8499, 12367, 244, 30436, 32298, 29307,\n 8017, 2373, 16982, 30086, 25899, 21666, 11287, 22117, 12185,\n 4425, 25446, 4008, 8585, 20818, 28787, 14430, 2602, 22517,\n 2413, 9418, 34565, 30972, 31693, 21512, 9484, 13253, 28081,\n 33973, 29594, 7078, 31743, 21175, 22013, 8346, 10113, 18333,\n 4447, 17151, 32274, 6005, 27786, 33233, 11062, 9074, 23661,\n 6214, 24198, 26946, 28664, 9016, 22902, 3322, 19653, 21689,\n 4171, 3133, 34372, 2995, 2136, 2044, 17331, 32293, 15542,\n 6910, 30079, 3682, 32950, 21993, 72, 9460, 5642, 20406,\n 12198, 35598, 4137, 13206, 8503, 7686, 7820, 12539, 10299,\n 21066, 32014, 33573, 26696, 15591, 16675, 33383, 26389, 31674,\n 10881, 19815, 29768, 26641, 28825, 9340, 24050, 4158, 10243,\n 11698, 5607, 16861, 990, 35208, 20854, 1036, 30707, 1721,\n 23796, 8307, 23695, 2931, 27357, 27604, 23463, 29715, 20735,\n 15713, 28857, 24607, 9143, 5695, 25476, 21901, 26281, 12058,\n 25597, 18053, 10257, 17056, 22394, 20694, 9419, 30452, 34957,\n 14880, 18809, 7002, 15610, 15721, 4946, 23439, 15357, 27978,\n 28052, 11410, 22801, 1165, 33950, 22322, 34278, 7155, 15914,\n 12135, 9122, 34599, 24284, 36047, 15672, 15894, 2267, 16882,\n 9852, 16273, 6334, 22457, 18008, 17591, 29586, 29596, 31019,\n 23322, 29308, 17196, 4569, 6963, 23693, 15458, 31809, 10758,\n 8164, 3379, 18737, 988, 1318, 10201, 20302, 29319, 33902,\n 5390, 17674, 23256, 16251, 6375, 14226, 9282, 3483, 24826,\n 9622, 26745, 29370, 1638, 19733, 878, 30881, 16936, 3772,\n 7210, 34889, 29357, 29678, 24369, 33048, 18965, 22508, 32384,\n 17741, 12595, 9705, 13713, 29811, 11682, 1, 33067, 3950,\n 29172, 16003, 6030, 34255, 17576, 14533, 28642, 30433, 7869,\n 7323, 15949, 21332, 29485, 24850, 9600, 5439, 18887, 20178,\n 10122, 26149, 1400, 24860, 4018, 6579, 29167, 6551, 17600,\n 24829, 17408, 6472, 16063, 24069, 11098, 33177, 18890, 32479,\n 22930, 12917, 31245, 25341, 3314, 27031, 19381, 34815, 15294,\n 32285, 35080, 34603, 4624, 19752, 6531, 33534, 12044, 25862,\n 10468, 25188, 33051, 31909, 7636, 12734, 20359, 20928, 3282,\n 11282, 16909, 8561, 11369, 1323, 28141, 3616, 21394, 17519,\n 20983, 11225, 11645, 34242, 33675, 5586, 27039, 17032, 1582,\n 20122, 28609, 10660, 34587, 16036, 35969, 15812, 15854, 20249,\n 25129, 20268, 24657, 33074, 26277, 9742, 11783, 16752, 28479,\n 12814, 8502, 8237, 25647, 14978, 27917, 17300, 31942, 27463,\n 25932, 9521, 9492, 17651, 32305, 22173, 22064, 18738, 31066,\n 35843, 14693, 1851, 5998, 25176, 28316, 4382, 19724, 20311,\n 12966, 20890, 22753, 776, 20345, 13621, 24301, 16669, 1054,\n 23647, 15548, 663, 1884, 11646, 35919, 5943, 20018, 2526,\n 17872, 2284, 30660, 32626, 31935, 26769, 13254, 23622, 23788,\n 16185, 35423, 26123, 30956, 16862, 24397, 15049, 24259, 18450,\n 5731, 12263, 10296, 32534, 2830, 12233, 10731, 27842, 4318,\n 
23697, 15496, 3893, 11712, 35266, 9880, 23116, 24553, 873,\n 22112, 7064, 15199, 4606, 5724, 5145, 19097, 35403, 29965,\n 31717, 4534, 24341, 31961, 23848, 16545, 2269, 11256, 16525,\n 16989, 33701, 13708, 14458, 11880, 2748, 34943, 7127, 6430,\n 19544, 19682, 2920, 23429, 6887, 23016, 17090, 25926, 29316,\n 14453, 22650, 13332, 23547, 24430, 17851, 5502, 20643, 30157,\n 11210, 24899, 7961, 35312, 8811, 29499, 36034, 35698, 16057,\n 8461, 25617, 11099, 11395, 19538, 4766, 13054, 25090, 3243,\n 30686, 12271, 32604, 9162, 14774, 11937, 10466, 12156, 28157,\n 11978, 2361, 25838, 35691, 4921, 14150, 31005, 13539, 308,\n 34600, 21040, 10035, 34265, 36064, 7536, 35863, 4389, 12586,\n 4841, 14665, 24166, 18547, 14600, 10227, 25032, 12032, 7457,\n 4574, 22385, 34074, 9433, 33444, 15186, 6663, 17808, 1104,\n 10291, 2943, 12145, 17445, 31267, 22324, 19249, 8708, 12869,\n 36011, 11321, 26305, 5865, 25664, 21217, 35303, 22571, 19955,\n 2795, 33274, 16097, 20681, 20594, 23957, 16749, 13858, 31600,\n 29590, 2735, 36021, 29503, 19482, 23197, 4908, 34147, 30614,\n 24120, 27359, 6603, 30510, 13191, 34840, 32157, 36101, 4189,\n 23897, 18805, 5844, 201, 13580, 1802, 6444, 9195, 23391,\n 26092, 970, 30191, 15923, 21715, 23169, 28160, 7369, 32349,\n 33672, 26704, 25433, 30310, 12930, 10943, 33090, 27705, 23915,\n 22901, 19560, 11204, 14815, 2882, 6013, 25153, 2580, 24435,\n 8669, 34705, 15223, 34601, 16889, 19673, 12177, 35799, 20066,\n 21045, 141, 26631, 7574, 24076, 1922, 5842, 33992, 23006,\n 31733, 7364, 5880, 3028, 11076, 6674, 9288, 1699, 21419,\n 23980, 32408, 27870, 21541, 8061, 19093, 29542, 12099, 11845,\n 7999, 1210, 25751, 34753, 15156, 31639, 6781, 28771, 20790,\n 21582, 7910, 14366, 4790, 33841, 7562, 10840, 9673, 5247,\n 6870, 11472, 10545, 16975, 8883, 35399, 31931, 25870, 2990,\n 13683, 27049, 29670, 18671, 25063, 14846, 13973, 1992, 24445,\n 18427, 24816, 1301, 25974, 10733, 33381, 19837, 18731, 11155,\n 24609, 18929, 25748, 10754, 30574, 31825, 35222, 5879, 18999,\n 11316, 19613, 2342, 24383, 3931, 6318, 8614, 10735, 26195,\n 6192, 33944, 26011, 5684, 11972, 32743, 7932, 3846, 1314,\n 6557, 28182, 30844, 2695, 7726, 7117, 24655, 19801, 11212,\n 24787, 24786, 26054, 21080, 28556, 6440, 6855, 16085, 8396,\n 1900, 24444, 19902, 21140, 5746, 31678, 21132, 18290, 12678,\n 30803, 13706, 5061, 20556, 29239, 2421, 20995, 690, 8789,\n 20609, 11556, 35693, 13288, 9787, 9097, 17488, 14868, 5739,\n 33982, 3048, 16606, 854, 13397, 10005, 13107, 34193, 23056,\n 22606, 16212, 1154, 25307, 29354, 19586, 27212, 25616, 21677,\n 32549, 6475, 6044, 22785, 9728, 3280, 16078, 15039, 23775,\n 11535, 20255, 15071, 7887, 17378, 8529, 20659, 26588, 1403,\n 33787, 5743, 13025, 24297, 16460, 5099, 16433, 10638, 18068,\n 440, 33725, 27418, 28039, 23320, 23652, 28087, 15175, 18856,\n 14604, 12365, 30668, 27720, 14231, 35852, 31217, 4573, 35947,\n 21155, 5512, 12277, 3512, 2988, 24358, 21869, 26975, 30834,\n 11087, 6514, 19320, 17415, 5106, 6911, 1948, 26766, 14166,\n 24910, 16806, 34016, 1712, 1467, 20744, 23452, 22800, 2338,\n 9780, 5795, 35365, 18119, 28712, 23091, 32084, 21828, 33931,\n 26539, 15899, 33227, 28384, 719, 28968, 28383, 9246, 7089,\n 33485, 28886, 25439, 32681, 17327, 25529, 22377, 12742, 21196,\n 35746, 26102, 24040, 1326, 11238, 29208, 1423, 2977, 31466,\n 16558, 30382, 34845, 27150, 32886, 14697, 28189, 6558, 16528,\n 2242, 29748, 7675, 18269, 19336, 21278, 19084, 16449, 3061,\n 553, 4446, 13798, 3860, 16330, 1800, 2440, 13229, 12226,\n 12780, 1127, 26864, 23395, 20160, 30840, 34417, 1831, 5817,\n 
15335, 30493, 18067, 34701, 12668, 8558, 8771, 5012, 14425,\n 11202, 14019, 27327, 12039, 497, 6468, 3859, 16142, 12731,\n 11221, 32996, 27866, 9251, 12910, 6383, 19260, 9804, 2370,\n 4301, 31831, 27679, 1004, 11312, 13565, 29477, 8874, 21765,\n 17317, 7817, 2548, 1006, 19850, 8096, 4830, 12759, 4094,\n 1814, 16664, 6543, 7603, 21504, 267, 12144, 31721, 18292,\n 8250, 9939, 1860, 9276, 24681, 9166, 21583, 16567, 20575,\n 18035, 18746, 13417, 34338, 1264, 30228, 3849, 16652, 2081,\n 34125, 27831, 1527, 22339, 18808, 27765, 14093, 27636, 11290,\n 24759, 11631, 10623, 27575, 36125, 21604, 6159, 25603, 13185,\n 13341, 13204, 30879, 8933, 9228, 26179, 15995, 12820, 18254,\n 17391, 31322, 33501, 18470, 22301, 23955, 12726, 19315, 21945,\n 31048, 27510, 16855, 31701, 30508, 31404, 19862, 21791, 18079,\n 7488, 27426, 30266, 35743, 11245, 908, 28174, 22332, 28854,\n 5268, 5922, 35754, 27601, 9950, 18695, 31239, 27231, 19474,\n 25182, 12920, 21209, 22845, 80, 8154, 20509, 14866, 21434,\n 22574, 11122, 3390, 6291, 30350, 34710, 12719, 984, 8698,\n 6360, 10585, 5517, 30843, 28452, 2867, 7032, 17898, 27730,\n 14710, 34493, 13920, 8567, 5293, 14187, 28685, 13895, 12461,\n 28510, 12841, 452, 33656, 24857, 13093, 3209, 4312, 7054,\n 31689, 27093, 4047, 31873, 23318, 28211, 16623, 262, 161,\n 2291, 1568, 662, 35279, 12046, 16788, 5324, 19195, 33572,\n 620, 1252, 34819, 16134, 17113, 20298, 32055, 14797, 12111,\n 469, 20481, 23525, 19265, 2020, 9558, 32834, 31489, 4105,\n 4408, 8229, 34782, 6803, 24543, 8327, 26473, 34459, 11167,\n 6651, 28717, 21022, 15663, 25168, 5071, 18011, 20113, 12090,\n 20156, 12709, 6529, 8217, 12615, 15906, 16498, 8609, 35472,\n 2819, 11046, 740, 26476, 5269, 24503, 7293, 13315, 4481,\n 32275, 25832, 19578, 8012, 3297, 2730, 11693, 11897, 11767,\n 32316, 21729, 7091, 6633, 2280, 14357, 12499, 21585, 16039,\n 9336, 11105, 13490, 17026, 16387, 9827, 7865, 16139, 18594,\n 6253, 2169, 24280, 30968, 29798, 10354, 33686, 100, 19806,\n 12424, 28703, 35734, 7121, 33088, 9272, 30742, 12402, 11361,\n 17466, 33860, 26833, 572, 9176, 4842, 12063, 22942, 9660,\n 33882, 30053, 33219, 26603, 14237, 4803, 26066, 9159, 9739,\n 11582, 17718, 32457, 32470, 32663, 24243, 1833, 615, 19783,\n 2203, 9556, 15290, 26627, 24350, 19842, 16992, 32332, 7738,\n 34899, 14260, 35065, 8078, 9570, 8966, 4836, 30563, 21087,\n 176, 28030, 24963, 34099, 24595, 3955, 26264, 30445, 17828,\n 2901, 24572, 9976, 762, 22297, 2214, 2808, 22436, 4258,\n 18546, 13068, 21201, 9206, 3654, 22148, 5620, 3009, 1359,\n 10405, 1028, 24220, 29663, 3039, 7601, 2315, 3492, 14217,\n 16445, 8275, 13110, 30300, 30093, 20767, 12495, 17833, 2009,\n 13067, 4990, 22518, 19704, 7746, 8339, 2307, 24106, 28435,\n 3245, 23227, 8224, 14386, 5077, 27192, 13646, 1121, 30930,\n 33661, 2847, 33730, 554, 25080, 8112, 24173, 23080, 29784,\n 4399, 18422, 31834, 10827, 25161, 14899, 21258, 32434, 31740,\n 17732, 3845, 29472, 19168, 16442, 30684, 19412, 9578, 6977,\n 26522, 12949, 744, 23985, 9247, 31388, 24531, 7550, 32845,\n 35331, 32435, 28083, 19683, 27909, 18551, 600, 5118, 24885,\n 1481, 4671, 2410, 6888, 6727, 27646, 14936, 34438, 21682,\n 22679, 6277, 5901, 20938, 24004, 27504, 1124, 25268, 5139,\n 30662, 18415, 32499, 31777, 7198, 2313, 9986, 1766, 12635,\n 32452, 32253, 34408, 1425, 23405, 7353, 10129, 28176, 22084,\n 4770, 11855, 31764, 8473, 23751, 18012, 15836, 1309, 23558,\n 15974, 27597, 22566, 16709, 34524, 34755, 28893, 33037, 1985,\n 23348, 15588, 23921, 426, 15157, 7291, 29252, 1047, 2815,\n 1338, 19304, 16522, 22188, 21246, 1706, 27400, 
3522, 31410,\n 13838, 10846, 32526, 26694, 24776, 27392, 16692, 12321, 9164,\n 2253, 3764, 4136, 13770, 19119, 20209, 5972, 936, 2129,\n 17337, 16332, 6577, 33909, 514, 764, 3912, 34583, 10320,\n 21111, 15393, 33112, 35476, 34912, 11173, 1572, 6554, 5505,\n 27313, 19429, 27154, 34955, 9514, 6840, 23123, 35310, 18908,\n 30128, 18732, 28086, 20461, 15960, 13422, 11097, 26312, 13112,\n 20507, 31986, 6031, 14592, 35907, 16136, 5187, 20029, 34466,\n 26459, 27534, 3648, 6436, 18643, 1631, 13200, 22900, 4462,\n 16130, 6652, 11170, 3201, 32588, 35415, 10157, 35098, 19356,\n 32759, 7583, 19508, 6677, 5519, 25381, 22335, 16388, 33900,\n 35136, 15382, 8640, 11953, 31071, 8235, 34560, 23651, 25504,\n 7403, 7587, 32256, 4130, 16630, 34290, 32787, 2979, 16849,\n 6636, 29694, 4674, 7118, 33734, 17207, 30917, 29069, 35533,\n 2050, 19306, 23385, 29509, 26737, 22665, 13317, 23582, 23048,\n 34544, 25609, 8353, 33116, 11453, 23639, 33879, 34971, 28206,\n 19083, 3886, 26208, 13750, 708, 30091, 22656, 29867, 35435,\n 7340, 23146, 28629, 22487, 24620, 26070, 31454, 31113, 21683,\n 4835, 3937, 13748, 14884, 28852, 6962, 16770, 275, 32650,\n 3619, 33629, 10237, 14712, 12815, 6934, 3043, 10198, 23886,\n 17232, 8150, 10477, 7115, 19851, 738, 2533, 22789, 33242,\n 14919, 5559, 1620, 30826, 15555, 19817, 21259, 14617, 1189,\n 4761, 16386, 17136, 12043, 32823, 21885, 15684, 21827, 5764,\n 7568, 28068, 1366, 3152, 21937, 14950, 21690, 13073, 15259,\n 28918, 13249, 20203, 29219, 30651, 18918, 28120, 30100, 25330,\n 19646, 9551, 31196, 26432, 8341, 23752, 24838, 10509, 27904,\n 23280, 22992, 32018, 6457, 21118, 27937, 22238, 14731, 27461,\n 11606, 3419, 4172, 14558, 5798, 11595, 33942, 3174, 10506,\n 28888, 15144, 21152, 4884, 925, 14214, 28098, 22644, 18156,\n 25685, 17414, 12836, 35591, 22984, 3581, 12199, 23641, 5263,\n 15951, 7843, 11291, 26741, 13343, 22107, 13018, 14612, 18753,\n 16165, 8757, 22909, 30755, 23781, 26770, 35396, 9687, 15061,\n 25687, 26426, 17827, 23675, 131, 27349, 5361, 27760, 25209,\n 33014, 2033, 17250, 16724, 28533, 24779, 12005, 11982, 27650,\n 9608, 7553, 31490, 10747, 2706, 8315, 33716, 6951, 31929,\n 12056, 5469, 14292, 9408, 29483, 18602, 15338, 15879, 33304,\n 13250, 29652, 35478, 24208, 34812, 8404, 30153, 10195, 8051,\n 31646, 7025, 20237, 17686, 8989, 35302, 22410, 23977, 10505,\n 28773, 8836, 20232, 19823, 22229, 32583, 175, 5200, 13452,\n 30770, 22405, 34216, 10465, 16104, 3910, 31621, 5600, 28226,\n 23136, 1578, 8176, 10954, 813, 7791, 28986, 27072, 27952,\n 6001, 9411, 31907, 27012, 34003, 27210, 29403, 10559, 23849,\n 25206, 10618, 36043, 15676, 18592, 5622, 9483, 28802, 2681,\n 17401, 31656, 22632, 8906, 10233, 2613, 23252, 27994, 24830,\n 35764, 21668, 13602, 26191, 14477, 6992, 14615, 24844, 16682,\n 22257, 26053, 25118, 14746, 16812, 32319, 4727, 25896, 23371,\n 32903, 5946, 6948, 9013, 24258, 11275, 29286, 34741, 1544,\n 22576, 11254, 20532, 6944, 21882, 18716, 5024, 28008, 16274,\n 26551, 32667, 35729, 28313, 17000, 28256, 13535, 3408, 23838,\n 18922, 22027, 15354, 35404, 10232, 6101, 11708, 28015, 26685,\n 29214, 1561, 26351, 7845, 6623, 1259, 26158, 15388, 26760,\n 34305, 26832, 29040, 9205, 8971, 18118, 21619, 13447, 5883,\n 32238, 27861, 17301, 21431, 11424, 5765, 2110, 21970, 3345,\n 12796, 2934, 18958, 30663, 4633, 4814, 35197, 19305, 16133,\n 3374, 12362, 19490, 16177, 21634, 22098, 12287, 29363, 6647,\n 458, 5058, 31707, 28133, 1505, 14792, 29442, 13805, 34191,\n 30141, 36073, 14263, 31611, 31063, 10460, 15292, 12238, 10535,\n 28962, 16113, 26895, 5298, 12250, 31819, 
4286, 27068, 9737,\n 34662, 11864, 22086, 35483, 30349, 20737, 34769, 25354, 8850,\n 26209, 9432, 203, 7806, 3196, 14979, 29213, 3718, 34379,\n 17204, 673, 21386, 4438, 22347, 12618, 24974, 15340, 17592,\n 9479, 32122, 28908, 13443, 22617, 27485, 33676, 23460, 5337,\n 12040, 20386, 19258, 16879, 11862, 887, 13851, 24309, 28389,\n 12394, 23577, 28400, 19643, 9759, 3841, 15129, 10011, 35155,\n 1110, 480, 13894, 16939, 6063, 28483, 17920, 26899, 5953,\n 2555, 28777, 28952, 15677, 3439, 30635, 17694, 33778, 31967,\n 15394, 17387, 16696, 15877, 25396, 4079, 16903, 23509, 24589,\n 4440, 18275, 16463, 2189, 24563, 34566, 18769, 25131, 4725,\n 17549, 25669, 26403, 27076, 34725, 25556, 10531, 32676, 8129,\n 12387, 811, 31451, 2806, 1490, 7395, 22908, 33419, 32897,\n 6213, 31339, 17257, 17581, 26733, 28567, 18717, 13801, 33505,\n 20925, 23763, 1468, 14828, 30165, 7496, 6732, 8873, 13914,\n 24781, 24170, 12925, 28213, 23867, 33223, 36008, 6247, 9366,\n 7525, 20256, 1938, 11024, 32790, 6693, 26373, 1745, 30267,\n 5999, 50, 18493, 27968, 14859, 17041, 8720, 31653, 29501,\n 23342, 18638, 19376, 5717, 23148, 9729, 22724, 26027, 30921,\n 26720, 10910, 23774, 33239, 11047, 10224, 21471, 1669, 525,\n 10492, 27081, 30617, 7413, 13546, 25927, 13522, 7246, 3481,\n 1869, 21475, 10486, 27240, 18023, 30570, 9275, 499, 2784,\n 34359, 21680, 2048, 9676, 17601, 406, 588, 6357, 31170,\n 22383, 14782, 24935, 20811, 18244, 21345, 17417, 20017, 30944,\n 1779, 25215, 19049, 26957, 2228, 6792, 7841, 13234, 25640,\n 12104, 13292, 2165, 34475, 9022, 24831, 28023, 29247, 17668,\n 1898, 8665, 4220, 24010, 5454, 9841, 20905, 12846, 17031,\n 13149, 12361, 9353, 805, 18609, 29093, 29010, 10583, 26621,\n 765, 24628, 29283, 28261, 644, 13028, 15122, 17006, 24092,\n 24176, 14203, 31279, 13246, 15744, 21081, 9071, 3032, 23494,\n 21864, 16402, 19685, 25295, 14224, 7598, 21192, 5206, 17757,\n 31827, 6225, 31381, 3939, 31470, 18070, 22776, 34046, 25397,\n 13436, 30799, 23803, 17958, 19785, 30365, 6282, 25792, 24075,\n 24353, 26526, 7667, 15541, 34522, 15150, 32500, 20774, 2519,\n 4751, 19008, 8039, 1798, 16706, 20134, 35110, 4714, 34399,\n 12843, 4663, 18263, 832, 13902, 22871, 17628, 13015, 21762,\n 25455, 28998, 30532, 18963, 10680, 32447, 18041, 7295, 12189,\n 17405, 21852, 13002, 26257, 11089, 3261, 4283, 16255, 27963,\n 30278, 32289, 15638, 17302, 30498, 36045, 32858, 31555, 12612,\n 4903, 3013, 20457, 19918, 34162, 1664, 22898, 15258, 5914,\n 683, 9323, 13528, 16801, 12296, 23304, 11654, 11193, 20038,\n 1811, 24671, 26776, 21094, 20545, 16894, 3355, 24925, 31676,\n 13385, 22704, 21497, 30063, 9003, 23663, 12724, 20416, 8964,\n 19565, 35694, 21600, 19022, 33265, 16295, 2817, 21577, 5923,\n 2132, 10842, 9214, 8080, 19123, 10409, 10295, 22658, 26200,\n 18482, 20571, 10068, 13544, 31877, 31149, 16932, 20761, 18621,\n 15642, 17035, 24723, 31662, 11251, 4871, 22546, 22234, 1697,\n 20391, 9144, 34906, 19430, 20452, 33708, 35450, 15648, 9658,\n 28582, 23749, 32843, 13613, 31955, 28011, 328, 4828, 13827,\n 12304, 22103, 5776, 28108, 21236, 14401, 18925, 7384, 22032,\n 26906, 32325, 4375, 10770, 24318, 19747, 13492, 30139, 28285,\n 27956, 23081, 8969, 26804, 8221, 34168, 12619, 16045, 24347,\n 22474, 35357, 2821, 9028, 35099, 2005, 8427, 23249, 29591,\n 10150, 9309, 7643, 16289, 15902, 15651, 11371, 31476, 26629,\n 10114, 12840, 14696, 1774, 31879, 13598, 23129, 31052, 35214,\n 19829, 15088, 21736, 28523, 34480, 19871, 30954, 21146, 16171,\n 35830, 10860, 5609, 5837, 8163, 5811, 17550, 6121, 34197,\n 28424, 34148, 32313, 24608, 18150, 
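    def _reset_opponent(self, board):
        '''(Re-)initialize the opponent policy from its name.
        
        Sketch wired to the policy factories imported at the top of this file;
        passing self.np_random (so opponent play is reproducible under _seed)
        is an assumption about the factory signatures.
        '''
        if self.opponent == 'random':
            self.opponent_policy = make_random_policy(self.np_random)
        elif self.opponent == 'beginner':
            self.opponent_policy = make_beginner_policy(self.np_random)
        elif self.opponent == 'medium':
            self.opponent_policy = make_medium_policy(self.np_random)
        elif self.opponent == 'expert':
            self.opponent_policy = make_expert_policy(self.np_random)
        else:
            raise error.Error('Unrecognized opponent policy {}'.format(self.opponent))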
7106, 2318, 23190, 2317,\n 4404, 8724, 24063, 24665, 20951, 17983, 7805, 28616, 12508,\n 19937, 34145, 12137, 27819, 10070, 18840, 7504, 32204, 31504,\n 30490, 5814, 17934, 8194, 27545, 33605, 10726, 31316, 1902,\n 12631, 26749, 25444, 8475, 7180, 12852, 21960, 34339, 2714,\n 21453, 12493, 11738, 22154, 19881, 3806, 7021, 778, 5633,\n 29076, 2869, 16010, 33776, 9621, 29706, 31945, 5766, 18997,\n 28698, 17072, 35510, 10928, 33891, 35454, 13270, 31303, 2112,\n 11928, 15034, 19036, 26648, 2908, 25368, 6722, 26527, 18574,\n 3623, 5105, 32092, 14158, 23121, 838, 17192, 35887, 31841,\n 6131, 24646, 4518, 34529, 14738, 12273, 30672, 23117, 31728,\n 4778, 17187, 20794, 7454, 14949, 25402, 33264, 18559, 11016,\n 2993, 31270, 24583, 24898, 17018, 27505, 20612, 32758, 2970,\n 14488, 23076, 9165, 8760, 33528, 21794, 1224, 12596, 32059,\n 15096, 121, 26275, 30536, 24740, 19264, 1341, 21169, 23928,\n 6344, 33507, 34536, 171, 19324, 32497, 14247, 8770, 31640,\n 8178, 1605, 189, 24554, 3384, 34258, 23426, 29585, 892,\n 19566, 1637, 11728, 5611, 35070, 25599, 4625, 29737, 2620,\n 21064, 5232, 30564, 22791, 21740, 28282, 30028, 2881, 7723,\n 8828, 7376, 3155, 29688, 20991, 12683, 9561, 34481, 29572,\n 34771, 31383, 23814, 17904, 17542, 7041, 5761, 26377, 4005,\n 12097, 7235, 8411, 30017, 35342, 12018, 15592, 6077, 1245,\n 32266, 30173, 23507, 28940, 20037, 32846, 15857, 34412, 8085,\n 23233, 22540, 30133, 18538, 10886, 27683, 35183, 2382, 232,\n 5104, 7884, 4783, 31246, 33064, 29392, 14319, 15171, 3198,\n 12234, 26610, 3181, 6833, 7729, 7612, 33818, 23655, 20425,\n 29756, 21389, 23572, 6965, 31920, 10625, 23905, 18229, 22248,\n 7875, 25905, 8169, 21757, 15108, 4825, 20287, 21648, 5416,\n 10867, 20695, 8092, 35857, 8676, 16337, 14041, 17836, 7886,\n 20049, 14563, 532, 24247, 29566, 7627, 23565, 15285, 23673,\n 8005, 19939, 13707, 25543, 25954, 29484, 24043, 16802, 24969,\n 5378, 17284, 18267, 15051, 998, 25369, 33921, 318, 10164,\n 29397, 18558, 2293, 4026, 11474, 23130, 18561, 12262, 13180,\n 6203, 23010, 25718, 8198, 32794, 30181, 2418, 18131, 31351,\n 5063, 26981, 8407, 25935, 22323, 2745, 29745, 6267, 18718,\n 24760, 1361, 24319, 1469, 20111, 3635, 33249, 33684, 32224,\n 21693, 24379, 16018, 27208, 129, 30195, 6412, 2628, 22951,\n 4043, 21532, 14992, 26807, 8526, 31634, 10965, 28364, 31283,\n 12054, 6120, 15391, 32192, 14478, 18147, 9779, 22520, 7076,\n 26616, 18151, 24933, 16148, 4985, 8528, 35672, 29762, 696,\n 31305, 17124, 30428, 3861, 31189, 18564, 9677, 32488, 15583,\n 19818, 371, 21556, 6678, 12480, 32276, 28927, 31687, 827,\n 30185, 1823, 33608, 12149, 8149, 18943, 35428, 404, 21710,\n 64, 31291, 17047, 1148, 217, 27249, 8316, 17573, 32456,\n 10072, 31221, 4187, 4792, 18181, 22533, 13126, 23092, 1673,\n 1419, 33281, 6699, 7701, 19667, 22296, 14526, 3310, 20777,\n 24992, 10384, 29982, 7947, 34793, 2122, 31628, 12231, 22215,\n 24512, 19345, 18636, 2750, 3661, 10383, 20936, 30923, 9185,\n 29648, 14742, 31696, 31111, 10552, 3037, 1659, 2875, 22754,\n 24051, 11112, 30708, 15065, 6574, 29649, 17236, 22958, 16137,\n 2757, 16898, 17096, 35346, 34039, 6611, 5428, 6465, 22730,\n 10821, 35113, 33953, 5608, 26842, 11948, 20152, 7600, 28385,\n 18655, 11576, 22970, 3170, 31318, 19458, 35709, 14004, 12718,\n 20477, 19598, 13849, 22193, 995, 12305, 10387, 4983, 10454,\n 13554, 496, 21917, 29554, 34222, 21664, 841, 33202, 29826,\n 15007, 30597, 14434, 8983, 24689, 11930, 33179, 30539, 30961,\n 6982, 33761, 10397, 26134, 17419, 24269, 21835, 11875, 32045,\n 35824, 35059, 18332, 2097, 30566, 8847, 19529, 30583, 
16657,\n 27252, 868, 34642, 10964, 3540, 27022, 25062, 25482, 29355,\n 19533, 12866, 31087, 16813, 5657, 20719, 15822, 33133, 11370,\n 4186, 2870, 4338, 24982, 11669, 25873, 34901, 22795, 32932,\n 27227, 8752, 24183, 31438, 21381, 1360, 9774, 1161, 12951,\n 6898, 14788, 20569, 7378, 22315, 6117, 29349, 5417, 26964,\n 34413, 18683, 7573, 829, 27716, 29513, 19628, 34593, 26553,\n 25823, 13995, 5157, 20, 22381, 33412, 20637, 18973, 6157,\n 27615, 1319, 19358, 26061, 4883, 3270, 14176, 28570, 26602,\n 29808, 32035, 10716, 32405, 17791, 11481, 16584, 4179, 22221,\n 27037, 21946, 380, 20228, 1519, 904, 29288, 15415, 13217,\n 8207, 938, 17545, 29480, 25258, 30331, 5503, 1675, 20843,\n 14897, 22684, 1222, 17583, 8510, 20369, 5446, 5330, 19253,\n 31915, 17027, 74, 26420, 35139, 5220, 17293, 20055, 6640,\n 32801, 1911, 34078, 6064, 35685, 31659, 7615, 7755, 34514,\n 5098, 3223, 18513, 30865, 27660, 4859, 8034, 30986, 33247,\n 30726, 2011, 9077, 28772, 26503, 8488, 19886, 28509, 32748,\n 18604, 11974, 18339, 16974, 34960, 14314, 32123, 17755, 27990,\n 25000, 12692, 7604, 26829, 36031, 34111, 8509, 24618, 2051,\n 6242, 6185, 27112, 33329, 36123, 13239, 36154, 28101, 32708,\n 28554, 18349, 4826, 29318, 16955, 20724, 23806, 14585, 10217,\n 5796, 33464, 16529, 1871, 8020, 31885, 20579, 17673, 34015,\n 3642, 11304, 32893, 19263, 4065, 29844, 28990, 28764, 21503,\n 20521, 27600, 17775, 35708, 31959, 35905, 10890, 24423, 8675,\n 34930, 28644, 23298, 35836, 1622, 3960, 5124, 6505, 24135,\n 4485, 14725, 31736, 8628, 16069, 2807, 8530, 5171, 14829,\n 30160, 18291, 23267, 9352, 22843, 8037, 3863, 29481, 13010,\n 18270, 31455, 25435, 18962, 23374, 25711, 8325, 13346, 27839,\n 14758, 34444, 13608, 6068, 18178, 9665, 15567, 22454, 2577,\n 1035, 33218, 29304, 1803, 14443, 23873, 31683, 34807, 1383,\n 3954, 27004, 34547, 14287, 14259, 27512, 1776, 36102, 24694,\n 16699, 20912, 3022, 23787, 2541, 22667, 407, 34982, 5374,\n 33560, 1856, 2350, 23472, 34465, 6721, 22882, 15298, 23864,\n 11384, 8763, 15177, 8439, 3493, 18395, 16279, 13309, 2319,\n 11023, 18496, 17116, 29842, 5683, 21540, 4923, 26525, 25292,\n 6154, 30098, 1587, 17319, 29734, 27560, 8270, 7561, 31319,\n 2583, 34738, 9730, 18432, 28943, 27796, 8251, 10563, 18921,\n 30406, 19227, 10815, 214, 29321, 30812, 16356, 24325, 32573,\n 31658, 7938, 25816, 10177, 31479, 12711, 16348, 16359, 126,\n 16948, 7245, 35087, 21608, 20523, 12901, 22429, 14332, 6252,\n 30095, 14075, 31517, 7269, 11203, 30637, 34498, 18304, 34363,\n 31023, 7126, 20091, 4818, 14650, 22295, 28453, 34164, 13302,\n 27108, 12845, 17177, 28850, 32481, 30275, 8335, 34821, 272,\n 31774, 1159, 3775, 1266, 30131, 10914, 1216, 17452, 20172,\n 30229, 20340, 34779, 29840, 8033, 32007, 10861, 33905, 27493,\n 9382, 9239, 3287, 29278, 25978, 7986, 17449, 23883, 9638,\n 22925, 28754, 25512, 29122, 21129, 11276, 31045, 12207, 24637,\n 3570, 10336, 32492, 22316, 24288, 12821, 26380, 771, 18101,\n 16883, 17084, 30892, 26786, 9539, 35081, 804, 27630, 27259,\n 1187, 5328, 34318, 29538, 7176, 2925, 26836, 14765, 20315,\n 5380, 22456, 4195, 9105, 26379, 16209, 11351, 12506, 16945,\n 25668, 24231, 31944, 16349, 8764, 5311, 9955, 32388, 33367,\n 18272, 21976, 31444, 24811, 22928, 2346, 24879, 22586, 29581,\n 10782, 30942, 28393, 5253, 31199, 26362, 28010, 29124, 631,\n 2251, 9396, 12775, 20863, 11335, 33454, 6569, 33359, 25531,\n 11320, 27303, 27466, 8243, 18524, 32954, 31960, 28219, 13699,\n 30768, 25518, 20881, 11955, 4276, 33396, 17547, 607, 9177,\n 28978, 19111, 25949, 1448, 779, 73, 11003, 18330, 3150,\n 30632, 
25115, 29258, 1650, 7303, 27069, 32141, 18932, 25009,\n 14069, 881, 34835, 33072, 2883, 9284, 30988, 12700, 28921,\n 13842, 20103, 18439, 26083, 20116, 31161, 27483, 23942, 5045,\n 30979, 1657, 34153, 15740, 12689, 27743, 7099, 34433, 20705,\n 17147, 6882, 3723, 12650, 3735, 23972, 35721, 23592, 12826,\n 7610, 24112, 35286, 13909, 35872, 27677, 8447, 22937, 7273,\n 33757, 8955, 8642, 202, 27598, 6805, 3626, 13079, 27218,\n 27448, 35578, 34254, 19761, 36181, 23032, 4128, 21819, 31110,\n 5925, 2183, 17953, 6458, 19666, 14300, 27613, 20722, 29146,\n 33826, 2790, 25708, 5487, 12971, 19290, 11674, 34315, 30127,\n 34094, 23319, 16906, 27129, 25205, 2878, 3799, 3749, 14778,\n 26343, 9238, 16920, 9359, 31650, 15810, 22159, 10366, 26897,\n 32115, 4963, 11939, 16959, 28399, 18883, 16247, 6750, 977,\n 1976, 12929, 2381, 18029, 302, 3649, 11337, 9657, 31684,\n 34754, 23741, 31994, 8968, 14296, 18873, 18952, 9392, 15794,\n 30035, 4378, 29612, 27846, 11397, 20516, 34633, 16718, 3705,\n 11563, 8541, 2971, 17881, 8265, 19942, 10608, 34518, 16202,\n 22957, 7352, 2627, 9744, 14885, 18773, 22965, 3469, 3852,\n 4906, 15351, 28623, 3317, 17803, 21719, 27277, 8689, 26688,\n 8281, 8685, 26541, 16129, 42, 4617, 26301, 8506, 2268,\n 26845, 29034, 10205, 29599, 17917, 19252, 1487, 4090, 24053,\n 13275, 35942, 29367, 23744, 32454, 7425, 15296, 18722, 35240,\n 18063, 18565, 2954, 27685, 23708, 3887, 18842, 2355, 18172,\n 25785, 7062, 6167, 5078, 10106, 10313, 1446, 22692, 19055,\n 31962, 16110, 21235, 35633, 22727, 9262, 30268, 12450, 14809,\n 23417, 30728, 4488, 23505, 12784, 17738, 10350, 35111, 22651,\n 19555, 3984, 20690, 30470, 6880, 25815, 5974, 5774, 32373,\n 2026, 32494, 27340, 27139, 15889, 17272, 34537, 19279, 8867,\n 17619, 21763, 24955, 21615, 16520, 17620, 15304, 16693, 5360,\n 10598, 7705, 8939, 7624, 11389, 35789, 1853, 2584, 1164,\n 21661, 8741, 35621, 17787, 31195, 31401, 4240, 17956, 2839,\n 29981, 23706, 35782, 15965, 10188, 2116, 24907, 35565, 22788,\n 5858, 34195, 28497, 13178, 5908, 28237, 13701, 13606, 8491,\n 23944, 31367, 12102, 27689, 22751, 5958, 34503, 1382, 5231,\n 22181, 26723, 31629, 7281, 17527, 34072, 21900, 10996, 31619,\n 23626, 8562, 12517, 24962, 15449, 22445, 23343, 35241, 15847,\n 1952, 11690, 9591, 12624, 28499, 28895, 21602, 8310, 23512,\n 8304, 35376, 16800, 25721, 20482, 7935, 468, 19270, 29251,\n 21048, 33328, 3233, 32490, 29032, 15635, 13116, 21470, 11551,\n 17634, 9187, 13324, 15562, 9023, 26936, 34322, 17283, 5750,\n 6417, 33960, 29060, 17160, 21250, 24244, 28595, 22424, 6819,\n 25587, 7969, 33582, 10794, 35714, 5841, 15774, 11656, 1845,\n 9543, 27764, 31415, 18891, 13687, 11524, 17007, 23780, 9626,\n 30887, 30838, 31990, 33440, 16591, 34820, 22400, 24835, 7654,\n 16095, 13396, 31999, 4772, 35221, 4642, 1274, 5451, 27137,\n 1762, 15920, 1672, 33678, 25835, 5754, 28447, 21982, 15136,\n 17768, 35797, 5551, 33455, 19506, 15072, 6254, 3721, 18409,\n 32678, 1588, 24228, 255, 29640, 25499, 32946, 28528, 32541,\n 30547, 21372, 6793, 257, 8446, 11440, 21625, 30149, 27490,\n 29646, 30049, 22582, 27814, 521, 10165, 9768, 30683, 16541,\n 16691, 22522, 32026, 1197, 23375, 17908, 27687, 17141, 14690,\n 25271, 25321, 27174, 15527, 7040, 12118, 13694, 25357, 28324,\n 12388, 34567, 17969, 26544, 28937, 21067, 33503, 16952, 10767,\n 21449, 4821, 27929, 34765, 1758, 12819, 2405, 30186, 33333,\n 18879, 32104, 34036, 26592, 14279, 16893, 27670, 19705, 11707,\n 25526, 24285, 12003, 7526, 8812, 8736, 13923, 1646, 3455,\n 32764, 18430, 24523, 5262, 5386, 13817, 26116, 19635, 26442,\n 7131, 
16935, 29441, 30237, 27163, 4589, 32432, 12809, 25674,\n 33481, 3007, 22752, 10235, 10240, 4192, 20619, 238, 21288,\n 25540, 18355, 843, 15834, 30659, 18528, 6804, 30501, 22197,\n 9605, 3357, 34067, 24057, 20833, 2279, 12317, 16233, 10445,\n 489, 5578, 15512, 792, 10875, 15436, 21920, 14964, 24127,\n 1260, 1099, 34830, 20324, 18680, 14569, 24095, 28694, 29028,\n 9142, 19839, 20858, 4768, 27200, 19740, 13203, 23090, 3962,\n 28351, 9298, 9896, 6406, 4245, 10494, 15701, 10953, 33033,\n 1088, 23939, 1126, 5255, 27366, 3274, 27599, 27578, 9148,\n 18826, 7135, 3207, 19821, 24283, 11447, 23824, 15213, 9830,\n 6257, 32532, 15571, 6839, 5183, 6613, 32126, 20685, 20269,\n 8517, 13259, 35337, 22933, 5867, 25137, 24391, 34196, 12911,\n 22226, 29979, 19622, 20175, 5927, 15020, 31775, 18266, 33814,\n 33429, 8523, 26761, 917, 5326, 21745, 31271, 13148, 8998,\n 29297, 22172, 35736, 2114, 30179]), 'T': array([30941, 14343, 31519, 52512, 39496, 52656, 32964, 28460, 14720,\n 42869, 9634, 45314, 40744, 52781, 6280, 14001, 52986, 1904,\n 32268, 53520, 37377, 54409, 34591, 48339, 35433, 54965, 5556,\n 26668, 634, 45857, 52698, 41800, 48455, 16862, 13519, 39494,\n 14190, 44356, 45255, 25013, 9628, 20604, 861, 34642, 38202,\n 53316, 1594, 54011, 42896, 24671, 44724, 24279, 19407, 54201,\n 37188, 53766, 20800, 30536, 50563, 30833, 16822, 1673, 40342,\n 11444, 53016, 42700, 13443, 15880, 21929, 35907, 51484, 37,\n 35859, 14751, 44687, 55508, 8444, 708, 51689, 29526, 19239,\n 9501, 29124, 10829, 21378, 36937, 53216, 3541, 30329, 11044,\n 45584, 55426, 16518, 124, 43045, 26050, 10345, 47262, 24683,\n 23777, 43600, 24701, 16013, 28594, 12942, 32204, 23036, 49521,\n 13999, 48058, 44835, 5411, 9789, 45159, 31470, 39126, 34578,\n 17464, 22909, 35681, 11238, 36366, 16145, 42702, 35737, 30319,\n 44553, 6758, 898, 48844, 9432, 12192, 43341, 31806, 13633,\n 41334, 51327, 12488, 23922, 14598, 473, 45786, 54916, 5572,\n 6815, 53528, 10968, 31933, 18137, 31019, 33846, 16986, 34140,\n 5328, 42355, 6414, 49671, 41250, 37674, 42450, 12765, 31909,\n 48208, 31374, 12752, 38330, 36697, 9837, 2558, 52981, 54922,\n 43813, 34403, 31232, 44922, 10528, 29492, 7161, 24983, 6412,\n 43608, 11406, 50045, 6629, 10761, 44980, 44526, 2233, 14591,\n 37354, 8340, 10411, 52038, 52071, 43367, 26818, 35293, 19044,\n 11256, 50193, 38320, 32560, 30690, 20732, 41746, 51566, 19431,\n 7108, 47399, 668, 34036, 42855, 2446, 12732, 43500, 44628,\n 21293, 27005, 36928, 2014, 26153, 33830, 22959, 40072, 38085,\n 30125, 12340, 4446, 28864, 41724, 44604, 11026, 31316, 55339,\n 53584, 19501, 49691, 38698, 46276, 6055, 21544, 52895, 43848,\n 16785, 14849, 1621, 31822, 47632, 17617, 27082, 39614, 28736,\n 23974, 50166, 4666, 43857, 27218, 44824, 54815, 54047, 51196,\n 22954, 7085, 6601, 53522, 24287, 31851, 35678, 28811, 41267,\n 32710, 49848, 699, 24163, 28473, 21759, 54925, 49452, 160,\n 30090, 22994, 17692, 20245, 1849, 23743, 23690, 15179, 45549,\n 6296, 32321, 29759, 15237, 48095, 21138, 51704, 15623, 4891,\n 33646, 6327, 46202, 48853, 10772, 5318, 50134, 51969, 27571,\n 14870, 52305, 21013, 42188, 8353, 51542, 43893, 26343, 2940,\n 44866, 45375, 33511, 22014, 4278, 10978, 15836, 48817, 14450,\n 5965, 32802, 7242, 43032, 41230, 28013, 11589, 39426, 54346,\n 51808, 9918, 9371, 24027, 46720, 25886, 40839, 40491, 25966,\n 38584, 8935, 15014, 46697, 8618, 54386, 41635, 48865, 7734,\n 19979, 675, 23929, 53683, 35897, 20737, 42497, 23814, 8826,\n 38525, 39434, 2597, 54606, 11826, 4122, 14330, 796, 33624,\n 15776, 20065, 16538, 33315, 15149, 19216, 51871, 16674, 
6814,\n 7829, 48945, 43637, 4974, 40767, 42800, 33115, 28500, 516,\n 42589, 15040, 17345, 14762, 3945, 22021, 8761, 9993, 14744,\n 7826, 39003, 4483, 23357, 1597, 15225, 30129, 54452, 48894,\n 23060, 36848, 53496, 16216, 2151, 10844, 32541, 54978, 16255,\n 23283, 14823, 2622, 22656, 9680, 16709, 26481, 46641, 50704,\n 50671, 27526, 14376, 4143, 18294, 46045, 4651, 12620, 15215,\n 19680, 24569, 43602, 17601, 10990, 37823, 5455, 52831, 14167,\n 35076, 27486, 17514, 21259, 11483, 39081, 11221, 13036, 4254,\n 9194, 8088, 45710, 13464, 50265, 53123, 28448, 4556, 35760,\n 41045, 19941, 48479, 1183, 12505, 33525, 21251, 27172, 52735,\n 37695, 450, 53025, 32445, 47279, 47220, 24830, 35158, 51477,\n 33886, 31161, 13601, 2910, 16242, 30077, 17072, 1939, 25754,\n 42823, 21879, 54073, 25167, 51261, 48506, 43397, 5726, 2308,\n 94, 46548, 22544, 52391, 37189, 24820, 39596, 37055, 37016,\n 40937, 19654, 37546, 30491, 45782, 54432, 11987, 24872, 37275,\n 7882, 27207, 42908, 50337, 11441, 16620, 33139, 32232, 36483,\n 4818, 27819, 149, 8634, 39067, 10031, 6948, 29233, 13485,\n 10288, 26821, 8901, 17896, 41030, 1088, 47789, 36828, 13032,\n 34203, 13216, 34539, 9829, 32954, 35125, 439, 30990, 8284,\n 41392, 48885, 42986, 13235, 25623, 14764, 2120, 3751, 26052,\n 51153, 21697, 49759, 15006, 35291, 51127, 19853, 45269, 31694,\n 2437, 30484, 39228, 23939, 51345, 1463, 51215, 50071, 7874,\n 2602, 47510, 25205, 41450, 45274, 25363, 45109, 42603, 5698,\n 4019, 29652, 53328, 1659, 52840, 789, 6145, 18989, 16793,\n 13904, 2781, 19973, 44614, 13479, 10869, 45631, 31765, 26796,\n 411, 31207, 40030, 41933, 31799, 48768, 8176, 26012, 52023,\n 18346, 7268, 4642, 20399, 42827, 38217, 41081, 45751, 20795,\n 24563, 42087, 30193, 54652, 9446, 14772, 30616, 52417, 22284,\n 19137, 26409, 38534, 13370, 22610, 22434, 16136, 40944, 5243,\n 51828, 2360, 918, 13393, 44678, 51309, 18851, 13651, 37208,\n 51572, 36140, 10376, 32777, 42616, 147, 16078, 37410, 55250,\n 8837, 22292, 21671, 10914, 8708, 46666, 49223, 43770, 23550,\n 23100, 45037, 10239, 44386, 42995, 49831, 22328, 41943, 25198,\n 26648, 30316, 26775, 3448, 11943, 53693, 14094, 35045, 2166,\n 26407, 42311, 50804, 48543, 30631, 22388, 12293, 21008, 51164,\n 14105, 17988, 27472, 48352, 46966, 50002, 19246, 52256, 50944,\n 48883, 29125, 37506, 43892, 27176, 29271, 49569, 7264, 48812,\n 36532, 31644, 51716, 32634, 32395, 11242, 22577, 47443, 26681,\n 5435, 2647, 46486, 22437, 28159, 26390, 8764, 9535, 30798,\n 32794, 15849, 20229, 51998, 31648, 11682, 21771, 24618, 21726,\n 44290, 46014, 5864, 28922, 39688, 48066, 26311, 44218, 24755,\n 22171, 42762, 55498, 33921, 7435, 22217, 24126, 53745, 13034,\n 32762, 41466, 16692, 1339, 12751, 6454, 19859, 40869, 54663,\n 2380, 47267, 32466, 6743, 41383, 48933, 21899, 12582, 3721,\n 47334, 35938, 33542, 47566, 29447, 15024, 49767, 25965, 1099,\n 28215, 35556, 54647, 51281, 9713, 55559, 22283, 13710, 20603,\n 53690, 4449, 30639, 1349, 51228, 4807, 39999, 33639, 45738,\n 11145, 35837, 37281, 21485, 49813, 45253, 26054, 2423, 25105,\n 39704, 759, 45234, 40019, 40713, 40531, 5961, 11902, 7879,\n 31745, 23822, 49928, 39925, 26806, 23903, 12319, 30199, 21038,\n 25619, 6812, 31296, 53862, 37163, 47007, 49918, 4354, 44089,\n 24218, 9793, 509, 12580, 45977, 32143, 33953, 18524, 45938,\n 2490, 45329, 17082, 53557, 33287, 29817, 39493, 4822, 2407,\n 51210, 3425, 15091, 50953, 14608, 45847, 11383, 10121, 13431,\n 4461, 23090, 53979, 25923, 23149, 52952, 45925, 46540, 24524,\n 34167, 47695, 2524, 5306, 39848, 41019, 36292, 49967, 30727,\n 54320, 
21262, 1791, 30890, 24063, 28594, 36840, 11573, 51028, 33293,\n 44661, 13065, 30443, 34927, 28342, 46601, 24421, 56539, 36596,\n 23754, 46044, 42166, 26666, 11505, 8005, 54672, 28155, 2660,\n 9532, 16324, 36761, 38755, 48405, 25630, 667, 53624, 23162,\n 58756, 26742, 38806, 3016, 25557, 40289, 11161, 52807, 25283,\n 1684, 35397, 7783, 5685, 43506, 34659, 48242, 35833, 25679,\n 57130, 55355, 46300, 46060, 11938, 24911, 14495, 4164, 24832,\n 16239, 58214, 38883, 1475, 59018, 36263, 12507, 49135, 44337,\n 12092, 58931, 38756, 10410, 14374, 55215, 56512, 58177, 35411,\n 54036, 16852, 49472, 54827, 48882, 10003, 12168, 14469, 36808,\n 25764, 23438, 8122, 50696, 13365, 39489, 13779, 29589, 33375,\n 36515, 53549, 37228, 58367, 1382, 40692, 23390, 27814, 6672,\n 38018, 56507, 29415, 17020, 44965, 3617, 40589, 46733, 38343,\n 2378, 2583, 977, 3024, 19686, 31683, 39388, 42675, 40676,\n 33306, 54261, 49206, 42274, 6774, 42858, 35201, 12163, 13805,\n 22915, 2237, 4173, 19026, 3181, 48136, 52891, 35946, 49699,\n 2858, 23351, 22668, 46551, 43578, 49286, 9975, 43385, 22585,\n 20043, 15377, 35292, 45728, 5624, 36103, 13710, 18867, 38284,\n 32717, 4288, 18640, 32581, 45911, 12438, 20792, 18046, 56252,\n 51196, 57373, 2570, 5103, 18653, 35427, 36039, 16140, 43986,\n 46078, 15913, 42222, 48430, 21713, 49972, 16472, 1303, 38714,\n 16661, 6308, 58540, 41158, 18410, 48018, 29978, 16769, 10768,\n 11553, 41489, 30122, 7766, 41166, 889, 27835, 50848, 43263,\n 55549, 33931, 32167, 48431, 34233, 58538, 45796, 36588, 20177,\n 45215, 52236, 33084, 47752, 43994, 13201, 39085, 6511, 29053,\n 47627, 38452, 5313, 31083, 58474, 13809, 56636, 41562, 39566,\n 3438, 10924, 17816, 51864, 55621, 59073, 40859, 58425, 25163,\n 12118, 46743, 54822, 57759, 54000, 13162, 51633, 46792, 20124,\n 45459, 43127, 58805, 33492, 34181, 17395, 57209, 23693, 16989,\n 27644, 8548, 51685, 17162, 41199, 29597, 42881, 57573, 36963,\n 46841, 50825, 27994, 17846, 25910, 16610, 15724, 43633, 5962,\n 43498, 39158, 32175, 27364, 44871, 18549, 4352, 33011, 6596,\n 49025, 23397, 44390, 27377, 25595, 4144, 4242, 23066, 33116,\n 38387, 5425, 2437, 27953, 40411, 27245, 53879, 42475, 19842,\n 52804, 46582, 25892, 27493, 12923, 49825, 12062, 22958, 36597,\n 6254, 32825, 40652, 46718, 34464, 25479, 14972, 32391, 58921,\n 44261, 51955, 5434, 7975, 48700, 30525, 51135, 20686, 39653,\n 46805, 22617, 16335, 13843, 19490, 52230, 19945, 17262, 13161,\n 22602, 8090, 20998, 20163, 44354, 38407, 33763, 55339, 22806,\n 26424, 54305, 30778, 3913, 46939, 49494, 44473, 10918, 42242,\n 750, 37943, 38263, 59252, 41401, 21489, 50536, 20026, 48964,\n 25440, 6114, 12152, 22373, 35901, 15422, 56398, 26635, 1464,\n 26551, 48877, 4539, 37621, 101, 50203, 48802, 29272, 49617,\n 43716, 45948, 23278, 3596, 41723, 12416, 53982, 8255, 26547,\n 40503, 1758, 36376, 52298, 41321, 16480, 28828, 30161, 28465,\n 19104, 46597, 28464, 2125, 34605, 51542, 2286, 4442, 19500,\n 1810, 24839, 24485, 49336, 29551, 13451, 34173, 41197, 20747,\n 31717, 136, 27387, 15557, 50907, 52179, 31484, 42716, 55302,\n 37700, 45113, 58010, 36848, 43804, 44915, 33362, 7912, 54157,\n 8733, 2490, 7085, 5601, 18295, 16182, 9430, 30203, 15655,\n 49945, 549, 44089, 55967, 386, 20938, 7188, 3338, 27608,\n 10972, 17203, 20571, 29061, 3769, 6311, 24275, 31458, 21338,\n 58286, 41531, 58524, 57780, 48281, 53580, 37123, 34279, 22825,\n 49764, 22008, 54929, 34268, 13474, 56756, 38058, 45256, 12663,\n 32310, 44007, 1967, 1884, 23645, 8355, 11649, 5684, 924,\n 42758, 39887, 36106, 13695, 44600, 14442, 43579, 9539, 41167,\n 
15800, 30210, 40341, 13810, 44214, 51895, 7342, 53578, 44438,\n 14125, 28051, 2804, 23474, 16849, 32322, 6012, 19595, 50019,\n 28511, 57483, 30068, 25653, 11303, 38614, 7504, 17584, 14949,\n 42700, 4254, 47761, 5148, 30843, 46464, 30471, 1560, 9528,\n 12007, 33120, 20053, 24644, 13432, 26205, 41989, 19524, 43523,\n 38839, 4888, 2608, 54006, 49300, 2439, 31012, 352, 26305,\n 35692, 48193, 6713, 12177, 11921, 27195, 54794, 3264, 52053,\n 23643, 53702, 17402, 32754, 24079, 18794, 20615, 53057, 35829,\n 4388, 26347, 57628, 48880, 34746, 32999, 10518, 26454, 43706,\n 13510, 45313, 26116, 43996, 51734, 773, 6704, 20081, 3132,\n 32805, 56147, 24757, 17985, 18740, 31101, 17199, 55344, 50863,\n 52207, 15595, 32966, 49710, 19305, 12629, 4896, 7295, 10093,\n 16985, 47437, 16428, 47357, 49892, 44027, 25616, 15661, 40506,\n 19614, 27800, 19549, 9757, 42430, 37241, 35287, 56648, 31074,\n 52475, 51571, 44372, 8306, 39932, 39787, 20205, 9492, 50812,\n 25181, 43621, 25843, 54376, 15979, 10432, 9167, 18253, 39180,\n 516, 35139, 53815, 15132, 56946, 5900, 22549, 31969, 21866,\n 13304, 9710, 34427, 40931, 24711, 11451, 3463, 25613, 33504,\n 15643, 26760, 55619, 38835, 38618, 13078, 52251, 45386, 3687,\n 53017, 58623, 55805, 52361, 13824, 10933, 9629, 55193, 58363,\n 48943, 41682, 9447, 53834, 16861, 52938, 41241, 1164, 30407,\n 18048, 29020, 17201, 22045, 37841, 48822, 54988, 33378, 49729,\n 50730, 45482, 51165, 35747, 13580, 52471, 49432, 2275, 6468,\n 55220, 37595, 54684, 4841, 37552, 13941, 30832, 23385, 47089,\n 28096, 10908, 33301, 48026, 38293, 26558, 18976, 24463, 32265,\n 52518, 57764, 34409, 21681, 35138, 27264, 34602, 14300, 14873,\n 7837, 28773, 21372, 17954, 18034, 18443, 45047, 12692, 1615,\n 31656, 39809, 19927, 18838, 26805, 30587, 26771, 45686, 14015,\n 43855, 4091, 42766, 22560, 2278, 2092, 25041, 55385, 36142,\n 54937, 58422, 45023, 33835, 28238, 5049, 13996, 36749, 40830,\n 58123, 25180, 8413, 10371, 10122, 54598, 23134, 48878, 34529,\n 974, 22615, 20653, 7910, 51689, 25585, 8780, 19009, 9458,\n 27856, 32650, 41983, 22656, 4601, 10989, 18491, 32042, 13121,\n 16291, 9224, 2363, 13567, 21975, 29778, 46943, 13272, 32528,\n 16491, 5381, 21163, 51717, 31314, 55239, 41064, 46344, 36195,\n 34683, 10106, 34345, 47502, 34852, 54409, 47822, 9592, 33302,\n 25488, 46744, 915, 7489, 56419, 2868, 2590, 59057, 37533,\n 14823, 39107, 31115, 1725, 19463, 51835, 49087, 18730, 51744,\n 58282, 13032, 20908, 49367, 46211, 45821, 58577, 19019, 45627,\n 15499, 15960, 12422, 12946, 16860, 30543, 24891, 2421, 27914,\n 53298, 34527, 18309, 42747, 16799, 35654, 43007, 39765, 40112,\n 3001, 9457, 2677, 52413, 43477, 20377, 42918, 28293, 22141,\n 45285, 57957, 49571, 38459, 11365, 49153, 9260, 52281, 59008,\n 12374, 23753, 9640, 20217, 15586, 11510, 1690, 30055, 43880,\n 52333, 16829, 53753, 46133, 47750, 42708, 8976, 16718, 47972,\n 52523, 24542, 7497, 10156, 54582, 13492, 41149, 44762, 15989,\n 36006, 12754, 7997, 10055, 51776, 22526, 2823, 5726, 20585,\n 40966, 22821, 11188, 36616, 55594, 31301, 53090, 55542, 33577,\n 36439, 17253, 7388, 9066, 12994, 6888, 49079, 2234, 28514,\n 45236, 2033, 17929, 4951, 43392, 56087, 8410, 54501, 45150,\n 13493, 10082, 28924, 14848, 39918, 31338, 52667, 21917, 48973,\n 15160, 27732, 50041, 51042, 9786, 50664, 57832, 10184, 2731,\n 43921, 56971, 58962, 3142, 25834, 3404, 55841, 48359, 50303,\n 11296, 52840, 6836, 1361, 23594, 33551, 27624, 41499, 2840,\n 12512, 13268, 23384, 56336, 32190, 14359, 12997, 48634, 9249,\n 31032, 49013, 50107, 16947, 22219, 51160, 33679, 6874, 31491,\n 
16501, 38541, 36599, 345, 3604, 35294, 43297, 20702, 32086,\n 9938, 16020, 27767, 41679, 13305, 10669, 51840, 35137, 39495,\n 51574, 9623, 30867, 38538, 5222, 11480, 29333, 56042, 31522,\n 33585, 3563, 22893, 2180, 10623, 53251, 3971, 19279, 36155,\n 50424, 24739, 45411, 48992, 48437, 21374, 35699, 8968, 30464,\n 21233, 10304, 14208, 17508, 51074, 43515, 29666, 28591, 47559,\n 32968, 37132, 12153, 52689, 18386, 41731, 53877, 40742, 41635,\n 15289, 8154, 10856, 633, 33943, 41404, 31263, 31590, 12499,\n 52974, 28016, 45810, 17874, 43455, 2103, 9277, 27853, 53959,\n 34863, 8794, 37450, 40022, 7921, 33197, 3072, 2812, 14723,\n 34466, 27978, 1260, 2382, 35189, 18763, 32892, 8506, 35121,\n 12564, 8070, 2943, 42389, 42455, 49540, 12664, 57368, 5955,\n 44539, 48005, 48972, 5512, 46913, 39982, 18312, 529, 26394,\n 51286, 4109, 6815, 12755, 28058, 49676, 1878, 4069, 7438,\n 21689, 2121, 54950, 36207, 7119, 20929, 47431, 44852, 36798,\n 6176, 22310, 41506, 52450, 21737, 39859, 3580, 30170, 39721,\n 34262, 40149, 44249, 9777, 25061, 47244, 14649, 57909, 49517,\n 7777, 37447, 27717, 39049, 29577, 13125, 46770, 20075, 17697,\n 17460, 27442, 19342, 112, 31594, 17171, 41897, 4526, 13465,\n 47076, 28285, 40848, 56954, 32464, 9327, 12638, 18996, 32895,\n 33184, 51184, 3568, 33945, 47282, 10728, 32312, 50151, 47832,\n 9877, 51916, 54853, 50262, 56420, 11260, 4880, 55183, 55708,\n 52963, 50886, 47087, 282, 1385, 16090, 58743, 53175, 40216,\n 36829, 32809, 5117, 39687, 55218, 13334, 43272, 6459, 45777,\n 31091, 17666, 38442, 44303, 4468, 18165, 32975, 887, 47921,\n 23742, 10241, 14898, 42074, 55135, 22692, 38399, 56322, 47245,\n 35377, 54710, 46666, 24487, 24032, 27079, 23430, 54627, 39043,\n 47591, 394, 44570, 12747, 41699, 48774, 29913, 21447, 9774,\n 36050, 54754, 16898, 23607, 39786, 20829, 28756, 50040, 48106,\n 49508, 37281, 53878, 56921, 31119, 24042, 10467, 12040, 54673,\n 6393, 30320, 5952, 21759, 25860, 24777, 49660, 10950, 57862,\n 28359, 37147, 10663, 38465, 6484, 5237, 34112, 15947, 14736,\n 19906, 14153, 6181, 51355, 882, 28254, 28586, 25605, 6977,\n 4453, 7725, 47328, 42542, 3626, 4362, 1887, 5432, 39376,\n 39925, 7507, 3967, 36356, 42201, 49558, 12504, 45128, 52065,\n 39942, 54885, 36847, 35236, 3130, 46626, 57734, 16777, 28059,\n 48088, 9151, 26297, 22714, 12772, 39508, 51902, 12917, 38295,\n 6052, 16144, 56661, 57968, 22004, 37500, 45671, 8519, 35485,\n 25203, 40713, 58135, 25244, 54432, 56963, 44439, 20203, 41766,\n 50752, 20644, 21995, 11800, 58398, 21555, 9239, 24590, 30862,\n 59211, 12036, 19149, 56482, 17183, 18853, 14788, 7015, 51317,\n 12190, 23332, 15856, 17844, 27424, 39548, 8907, 55588, 42910,\n 22733, 22440, 6419, 10571, 58156, 55502, 16378, 46258, 49028,\n 17590, 39110, 16868, 42571, 25563, 51274, 58900, 42287, 20340,\n 14431, 36632, 56549, 25545, 48686, 34730, 5364, 16133, 35133,\n 10438, 10215, 38280, 13564, 8026, 50772, 31503, 32988, 43376,\n 32856, 34795, 28271, 35853, 8503, 18705, 45294, 8924, 2535,\n 2420, 52036, 59187, 16204, 24953, 21346, 20220, 42398, 19343,\n 40433, 37559, 13427, 49951, 28858, 8106, 6674, 17908, 58457,\n 17388, 57152, 56804, 4684, 13460, 47660, 6195, 30124, 39674,\n 23277, 54478, 44360, 42118, 13896, 31170, 11134, 14109, 632,\n 39245, 1919, 39999, 32174, 40721, 42621, 51508, 23734, 15488,\n 36532, 5150, 1058, 16669, 39623, 33900, 8724, 55747, 29458,\n 48338, 44883, 42965, 35085, 43933, 34017, 57515, 41157, 28319,\n 55778, 18102, 2284, 41840, 26435, 20827, 4946, 23199, 38885,\n 41980, 44969, 16751, 40886, 21752, 7849, 13870, 17864, 49836,\n 2806, 35621, 
29070, 24117, 41068, 2511, 17352, 37090, 20801,\n 27542, 16906, 27063, 6061, 1672, 12059, 31487, 48467, 19092,\n 13514, 27722, 4502, 16421, 45487, 48436, 51532, 21653, 58593,\n 43656, 39858, 23984, 50639, 18261, 24954, 14352, 8560, 25433,\n 3081, 27118, 3811, 36016, 19390, 51857, 3286, 30379, 24194,\n 5964, 32536, 9274, 57009, 23140, 12214, 4225, 48156, 40088,\n 17771, 24706, 12699, 23214, 12532, 36909, 40979, 25198, 18855,\n 36108, 38920, 8035, 14310, 40943, 40809, 44532, 43512, 53263,\n 52132, 27229, 19837, 1582, 25983, 11880, 46680, 16635, 54475,\n 38298, 196, 12823, 1780, 19929, 18197, 26281, 36330, 44584,\n 40801, 17156, 36025, 53603, 18926, 48159, 25854, 7588, 31451,\n 24461, 26144, 58934, 14213, 36190, 35493, 54982, 14741, 11924,\n 55022, 15276, 25631, 6103, 38715, 16505, 42257, 31752, 23457,\n 12388, 979, 51215, 25790, 42692, 43950, 50060, 42273, 24126,\n 8220, 18686, 5090, 25835, 26136, 15691, 47294, 58841, 48740,\n 19326, 11794, 26606, 57783, 29176, 2215, 55649, 32008, 1817,\n 29303, 46161, 32544, 46008, 8116, 50053, 22663, 35209, 12645,\n 41946, 9240, 49870, 4464, 12586, 42922, 11209, 9942, 36359,\n 55404, 14279, 29677, 45735, 58138, 47609, 54194, 52797, 44893,\n 49849, 45226, 54792, 14689, 7216, 32155, 9785, 2335, 10095,\n 10352, 52987, 38730, 53304, 44541, 48644, 49867, 9141, 21218,\n 45270, 19200, 57206, 5134, 3931, 18918, 20332, 46077, 20735,\n 27358, 55928, 46679, 36708, 42124, 24336, 4923, 15227, 718,\n 58792, 16091, 49102, 34976, 41806, 11080, 7480, 13573, 41084,\n 11422, 14218, 53502, 12966, 37822, 26708, 28275, 24768, 1742,\n 4157, 12434, 21961, 31134, 57215, 5881, 55072, 57885, 14334,\n 55330, 50101, 46104, 2606, 20711, 23537, 29876, 10262, 40034,\n 47721, 3327, 37668, 58920, 7831, 25185, 57878, 31281, 28307,\n 32542, 48498, 19355, 53064, 50797, 49666, 35125, 9663, 50731,\n 8521, 51080, 19399, 53762, 27517, 48442, 39111, 21107, 36015,\n 38551, 43405, 28065, 27203, 10314, 14178, 20584, 11110, 7734,\n 6456, 39095, 59248, 52716, 56622, 12792, 9728, 8929, 58798,\n 18860, 43309, 58297, 26326, 52912, 13950, 32538, 37227, 33224,\n 10312, 49916, 33912, 3746, 12067, 35618, 25149, 3137, 30365,\n 30927, 30459, 5051, 20034, 51173, 57861, 46014, 39230, 2285,\n 19370, 26451, 45397, 43772, 27463, 26251, 32739, 53823, 42828,\n 27419, 17408, 55147, 20944, 49570, 30760, 47033, 34693, 9496,\n 47596, 51188, 7167, 26183, 14799, 47670, 23323, 52663, 41470,\n 56246, 52498, 20488, 42518, 8367, 21842, 53308, 18844, 39397,\n 48647, 29221, 18891, 38392, 30910, 34657, 49511, 532, 26257,\n 40597, 9175, 48695, 58788, 58554, 141, 3530, 44882, 10547,\n 52160, 53985, 31183, 56076, 34967, 57444, 17176, 13206, 44674,\n 58092, 1541, 3938, 36369, 34051, 50278, 15387, 35344, 47558,\n 18234, 58699, 34632, 18492, 3175, 45282, 8466, 31082, 25457,\n 48567, 37057, 49906, 22145, 1546, 8190, 53948, 57450, 19561,\n 16881, 26054, 54904, 29406, 31790, 52006, 55604, 15967, 400,\n 24956, 38711, 9819, 50990, 15011, 40691, 314, 25734, 40744,\n 55283, 8169, 26585, 3725, 27897, 39713, 9672, 50869, 8801,\n 8174, 21833, 27916, 43112, 28928, 18124, 58996, 50823, 40681,\n 55071, 19369, 11380, 50916, 36566, 48089, 2044, 35329, 54620,\n 20797, 3702, 24771, 52835, 8646, 18025, 3854, 39437, 16313,\n 1730, 52991, 24976, 56126, 24928, 22700, 214, 23222, 11204,\n 48883, 611, 38719, 17976, 53191, 36222, 15159, 9313, 33386,\n 10611, 32039, 46173, 35387, 42784, 34501, 27971, 54099, 59245,\n 12453, 30651, 41830, 29011, 5320, 19220, 49399, 5139, 11635,\n 44632, 9387, 8916, 17197, 19033, 24649, 31748, 37384, 14229,\n 53227, 58681, 13390, 
47084, 19389, 5027, 647, 412, 11224,\n 15267, 44380, 27012, 37042, 51175, 12000, 28194, 19479, 2152,\n 3829, 41154, 14929, 15006, 42504, 57478, 16946, 15443, 46701,\n 38350, 20577, 56588, 33633, 38178, 48843, 9136, 31142, 33253,\n 22185, 17074, 18677, 32570, 46905, 12684, 28995, 9796, 20158,\n 41118, 58178, 25539, 22225, 18181, 31445, 4915, 9712, 28525,\n 1946, 5040, 22214, 35358, 45384, 46742, 42383, 29198, 47189,\n 52996, 37152, 355, 55052, 1954, 32582, 44440, 21724, 6638,\n 5705, 24165, 2764, 29885, 33923, 35091, 50627, 44475, 3174,\n 23193, 17842, 18341, 45976, 15261, 7898, 41667, 51960, 12543,\n 21761, 5216, 40134, 21267, 47707, 35521, 30298, 2564, 38357,\n 27399, 11625, 22846, 38843, 56371, 23359, 2350, 35349, 8843,\n 49045, 4454, 7288, 18857, 37945, 18493, 22502, 52177, 4634,\n 42448, 51422, 56857, 33761, 11060, 1183, 26586, 38622, 59275,\n 46414, 7844, 54271, 12038, 9962, 26865, 10825, 50130, 12352,\n 18746, 21342, 57633, 48580, 32180, 9390, 29646, 18322, 23918,\n 45881, 30494, 16232, 45937, 16535, 24367, 33407, 22332, 37144,\n 34000, 46061, 11935, 50435, 44272, 35056, 95, 37005, 18907,\n 49437, 30692, 20129, 21792, 24286, 41996, 18241, 341, 4485,\n 37671, 43075, 38620, 22349, 40266, 46461, 45568, 28663, 49099,\n 13040, 39927, 49602, 35520, 20817, 24074, 55764, 52884, 49827,\n 5046, 18814, 20848, 799, 54504, 53518, 37489, 32641, 51886,\n 44917, 47382, 42691, 25346, 53358, 20253, 6046, 43712, 19956,\n 34583, 42938, 18141, 34708, 22911, 11893, 1687, 47696, 45318,\n 38347, 37756, 38026, 29179, 14755, 12811, 52363, 3114, 42075,\n 54923, 17147, 4090, 12424, 52813, 34545, 16243, 13706, 57252,\n 30952, 22512, 15167, 18522, 52643, 9219, 16884, 4122, 30507,\n 48128, 646, 52563, 27929, 16484, 25918, 35074, 21204, 35151,\n 13738, 51969, 38488, 58587, 49487, 58701, 32446, 26794, 33227,\n 9884, 28243, 47886, 26620, 39309, 42382, 11949, 37718, 7791,\n 54344, 53980, 48151, 46437, 48535, 28922, 23637, 28143, 27639,\n 36133, 24740, 20714, 17017, 17220, 49845, 15329, 17957, 27518,\n 50962, 18564, 57731, 5067, 54670, 26960, 37020, 30058, 24713,\n 38096, 613, 16708, 51653, 44095, 47226, 52490, 45242, 19504,\n 52084, 24244, 14568, 40755, 35328, 33836, 49122, 55937, 57814,\n 20710, 46881, 33363, 1108, 21908, 24198, 58032, 39834, 39187,\n 8375, 54741, 126, 55901, 6030, 7727, 45415, 57464, 26719,\n 38021, 39211, 31347, 26954, 3844, 27649, 7768, 21521, 54128,\n 2213, 3910, 12254, 27933, 16706, 25082, 55872, 42513, 41208,\n 15005, 36857, 47581, 23038, 54561, 15305, 45695, 15669, 19080,\n 23190, 30273, 27270, 43237, 49559, 36933, 9545, 38844, 43918,\n 45577, 39827, 17782, 32714, 11458, 56381, 54204, 54640, 45131,\n 41188, 51754, 8750, 21856, 28109, 38321, 58998, 10620, 38308,\n 34829, 52208, 7570, 51528, 14209, 50723, 37424, 19562, 40841,\n 57214, 15861, 12206, 24864, 22544, 25217, 13137, 56672, 30704,\n 44693, 37271, 16027, 56680, 8618, 23738, 53170, 47286, 40373,\n 36701, 13959, 20796, 24391, 54083, 3610, 9238, 15222, 6229,\n 35742, 48570, 15679, 53119, 32546, 29754, 59111, 29435, 27184,\n 46291, 56346, 12286, 58373, 36692, 18790, 637, 13172, 11484,\n 47319, 35394, 37421, 58450, 20482, 4961, 21275, 2481, 6208,\n 1440, 17438, 28168, 38208, 50318, 24836, 48336, 51304, 8856,\n 30516, 8370, 23710, 41594, 21361, 52335, 37911, 34586, 24405,\n 19630, 50945, 3732, 42536, 18616, 46807, 34843, 41361, 6663,\n 16468, 58973, 30976, 45603, 58835, 22784, 22065, 53123, 53429,\n 23816, 4575, 52037, 28871, 37499, 37960, 52097, 53605, 39894,\n 56066, 3816, 15651, 40825, 34123, 49574, 9609, 43907, 52377,\n 15492, 34128, 
38601, 24646, 6772, 3276, 36861, 19268, 44480,\n 38132, 57005, 21151, 51555, 23416, 2725, 43819, 54813, 9428,\n 11360, 24876, 43739, 15843, 53655, 30580, 33165, 50527, 14927,\n 45668, 46947, 54783, 24433, 26850, 30646, 25269, 50299, 39082,\n 57592, 54557, 30249, 23124, 40820, 3737, 28619, 17164, 20378,\n 30446, 8163, 54472, 49485, 28325, 28796, 28667, 37788, 10741,\n 45688, 42669, 36728, 58650, 45329, 29703, 14805, 42757, 48572,\n 28218, 25873, 8079, 1777, 45864, 24527, 44567, 30889, 34342,\n 47574, 36563, 42163, 27357, 21743, 2927, 34146, 36609, 25055,\n 24599, 12609, 46722, 11434, 47283, 9106, 6004, 9753, 36218,\n 56542, 56831, 32867, 38141, 55509, 139, 8399, 52886, 43248,\n 41350, 45435, 25200, 46408, 32234, 50831, 19393, 25847, 49651,\n 32661, 45065, 28946, 2479, 14337, 9265, 48989, 24703, 27886,\n 28652, 9128, 44294, 23197, 31950, 41463, 5870, 41842, 55342,\n 4389, 58407, 10511, 29665, 29534, 29699, 4873, 9049, 14098,\n 9678, 46502, 21169, 2983, 35313, 14180, 23269, 3627, 58200,\n 56357, 27459, 15928, 19583, 22597, 27261, 18074, 29883, 38060,\n 45174, 18560, 38560, 39374, 9761, 42022, 12247, 30811, 1975,\n 47747, 51609, 12043, 44000, 31879, 35611, 6552, 2859, 31176,\n 5113, 42891, 48778, 45399, 34630, 15456, 44948, 58494, 20396,\n 31489, 34694, 12345, 33340, 16623, 30297, 7562, 36441, 16758,\n 55358, 36682, 36389, 43542, 39044, 46801, 41751, 24003, 24043,\n 48536, 45260, 59271, 15319, 38907, 6231, 207, 27859, 33152,\n 6258, 6798, 17335, 54160, 5316, 9993, 38300, 31325, 10070,\n 18995, 46759, 29613, 50079, 29968, 15804, 50122, 55144, 22605,\n 51950, 31415, 41378, 5533, 38857, 58163, 2348, 43224, 32830,\n 48947, 55476, 19974, 57278, 15163, 11685, 1576, 42824, 27298,\n 28762, 31231, 10775, 26036, 8497, 35265, 9914, 32807, 36419,\n 17227, 26109, 47820, 18474, 8667, 40946, 34986, 16009, 40202,\n 22106, 6855, 23336, 18764, 24464, 47149, 48293, 9124, 22096,\n 11894, 20216, 9454, 42633, 43401, 19657, 7225, 41969, 42511,\n 8660, 21290, 54664, 44293, 46738, 291, 20720, 13653, 6002,\n 15294, 49736, 22331, 50546, 40212, 14623, 2444, 19905, 21524,\n 10871, 51686, 58982, 29769, 50429, 24450, 2599, 16953, 1972,\n 19628, 42619, 28630, 40179, 33333, 44190, 47757, 20413, 28937,\n 16085, 31340, 49635, 2693, 37242, 31418, 30523, 36348, 5957,\n 18691, 2027, 10858, 18158, 12056, 32577, 48527, 26681, 39070,\n 44557, 7460, 42407, 26232, 32689, 42955, 4802, 5281, 3766,\n 49174, 1863, 20903, 22640, 19250, 55983, 7283, 56217, 26983,\n 9542, 14366, 30631, 21923, 55886, 29106, 41478, 55379, 44558,\n 7711, 33054, 26825, 28659, 2011, 6314, 34884, 40558, 26947,\n 37944, 33246, 22333, 22589, 14699, 12605, 3508, 1436, 26143,\n 37351, 50996, 42237, 55108, 46118, 46286, 19835, 8457, 32352,\n 42893, 50833, 38925, 13620, 44275, 28612, 26509, 33369, 12016,\n 771, 56162, 2504, 4218, 59221, 35112, 25671, 42888, 9724,\n 56620, 23751, 968, 58260, 22415, 12119, 6297, 35391, 609,\n 31502, 11884, 46785, 23650, 32797, 40199, 37778, 15331, 39634,\n 48149, 41274, 52493, 25992, 57867, 11100, 35310, 23462, 12960,\n 988, 25625, 9229, 35885, 21344, 45508, 18016, 5550, 43701,\n 48897, 20882, 56309, 50998, 12034, 13679, 13050, 19977, 15515,\n 51959, 53954, 7341, 48192, 18078, 37993, 59272, 11761, 4127,\n 36399, 46227, 40883, 23831, 22354, 18910, 23819, 52408, 53701,\n 10269, 5219, 3660, 18888, 47012, 38153, 57874, 59279, 38626,\n 2789, 48433, 4862, 7925, 56006, 39212, 34420, 3940, 12506,\n 31058, 18249, 13515, 7182, 43466, 55827, 53658, 2588, 52671,\n 41993, 26763, 26372, 3372, 41327, 30979, 57037, 56410, 20709,\n 39160, 2624, 50012, 
46510, 54849, 35705, 54002, 4294, 5230,\n 40746, 47914, 8782, 13396, 28401, 53779, 22220, 51676, 13059,\n 26375, 43052, 20953, 3597, 40680, 45154, 4674, 12225, 18418,\n 43527, 36531, 45541, 5890, 43275, 15977, 43887, 32393, 46804,\n 32870, 13808, 11645, 31615, 56645, 32, 7805, 13089, 43917,\n 49532, 12560, 44408, 22497, 26512, 8162, 24609, 12891, 29036,\n 27467, 1903, 46824, 3398, 24968, 54252, 28208, 27718, 36987,\n 39175, 54944, 42856, 35381, 33080, 29091, 17834, 2637, 17327,\n 35467, 32810, 27376, 11628, 23554, 58832, 4396, 40097, 36196,\n 36308, 39531, 46298, 39523, 33608, 29892, 32620, 1157, 44107,\n 7156, 26872, 820, 39331, 37859, 13761, 32070, 14203, 14385,\n 31155, 37406, 54688, 30156, 51688, 23689, 4529, 55664, 29367,\n 19649, 57857, 42677, 16840, 40532, 34260, 46483, 24796, 2905,\n 47386, 42563, 33310, 43678, 18379, 51507, 57042, 13501, 3400,\n 37670, 25102, 21079, 35368, 55560, 39731, 18581, 53858, 48837,\n 47342, 34223, 54119, 24495, 15993, 7494, 47331, 38981, 54410,\n 44551, 37594, 12865, 15134, 39952, 47388, 31889, 4614, 37068,\n 49796, 32099, 45590, 7957, 23499, 31109, 42980, 42125, 56393,\n 49130, 10356, 49061, 53895, 12251, 44363, 44260, 50404, 4530,\n 48398, 57362, 29227, 1805, 37040, 31967, 9932, 56526, 54368,\n 51786, 35900, 37988, 54438, 44166, 55276, 25077, 25300, 33454,\n 58162, 53480, 26503, 26230, 48605, 11755, 56175, 45114, 41184,\n 36511, 47238, 7983, 30201, 47959, 4810, 52928, 42973, 15144,\n 58558, 47523, 34250, 39460, 37358, 19029, 35897, 5172, 44393,\n 22402, 25039, 4607, 20255, 39089, 37182, 25295, 31764, 29383,\n 9997, 19913, 28486, 51896, 22559, 1412, 17453, 54613, 2043,\n 10821, 29172, 33789, 24356, 14639, 25179, 28876, 28294, 34926,\n 48860, 7557, 5991, 23453, 24549, 16865, 8575, 25984, 23241,\n 17939, 50447, 19794, 47054, 39789, 24453, 53854, 5724, 15743,\n 56720, 4446, 51239, 11639, 25357, 58616, 23649, 29569, 35939,\n 52851, 43805, 11487, 45245, 56389]), 'P': array([33806, 37108, 40402, 4460, 11576, 905, 23111, 25145, 5405,\n 30411, 617, 43172, 25518, 17963, 9241, 11750, 17268, 9743,\n 1028, 41629, 37142, 23612, 34014, 30582, 17662, 36039, 43237,\n 13631, 43921, 1817, 24230, 32210, 1297, 34578, 7604, 29959,\n 45689, 26855, 23031, 9145, 25275, 25634, 47004, 33414, 27260,\n 7074, 37544, 19234, 1157, 19643, 20031, 17299, 1289, 25136,\n 35847, 38654, 19638, 46085, 35069, 39915, 39140, 11719, 25018,\n 38567, 11415, 27511, 6342, 31889, 32892, 4047, 18647, 32749,\n 38590, 7043, 9714, 28071, 29087, 12914, 15516, 23237, 8117,\n 38494, 46735, 14014, 35852, 36668, 42586, 16981, 40757, 23671,\n 13993, 16243, 46857, 11338, 41473, 8824, 42958, 8375, 779,\n 39093, 3717, 6767, 28969, 32259, 9291, 37986, 3392, 28874,\n 27366, 33458, 36949, 19438, 36971, 33135, 17406, 1329, 1676,\n 39325, 34213, 12563, 17804, 24061, 1390, 15302, 37989, 5324,\n 24905, 39726, 8180, 33558, 35980, 18365, 35512, 40975, 46628,\n 6627, 43376, 26923, 5553, 31566, 14958, 19856, 14066, 17398,\n 46298, 19678, 14244, 9746, 26542, 40548, 12299, 35242, 8055,\n 30695, 42016, 26962, 18286, 25735, 29352, 38055, 16280, 19233,\n 21216, 8179, 46763, 28725, 31608, 40471, 41407, 23524, 44767,\n 8306, 34186, 16764, 34465, 38945, 5291, 31379, 31897, 8252,\n 20660, 20697, 27736, 33197, 22469, 17527, 30916, 30123, 18071,\n 30071, 23170, 37059, 46887, 8160, 28459, 33964, 44069, 45223,\n 42594, 2442, 9851, 2175, 41857, 46173, 25011, 5369, 29286,\n 37838, 44029, 15128, 39320, 25068, 7189, 8131, 32537, 21783,\n 36224, 40687, 13508, 7109, 14959, 37787, 46636, 14929, 18603,\n 27190, 36233, 1882, 16814, 46356, 12572, 
19419, 12512, 3429,\n 30109, 21712, 46066, 21011, 11887, 27180, 4447, 43171, 38885,\n 2288, 43590, 23705, 14393, 35537, 38981, 1431, 29183, 45803,\n 30513, 21893, 32276, 41386, 8677, 42856, 12749, 25891, 4237,\n 15853, 43996, 3658, 33065, 22073, 18264, 23857, 4270, 1325,\n 29790, 40261, 38444, 38103, 16992, 5543, 25047, 16809, 21008,\n 13575, 2094, 16994, 26768, 25944, 1485, 798, 34582, 20604,\n 33906, 34325, 26810, 9518, 18691, 14885, 28449, 21608, 28431,\n 35710, 30293, 25336, 22328, 18540, 45870, 20290, 42591, 25511,\n 26822, 35795, 19671, 17036, 37703, 28155, 7985, 30792, 37759,\n 21315, 16794, 36432, 23408, 36824, 12493, 2060, 37486, 124,\n 25817, 934, 43576, 3460, 3089, 19876, 2595, 21459, 44258,\n 46893, 33709, 16294, 42389, 31075, 9216, 6790, 15644, 63,\n 29565, 34136, 37854, 995, 3435, 23948, 15288, 4575, 44215,\n 31373, 43732, 11817, 10310, 42339, 1656, 13139, 14509, 25644,\n 17831, 22841, 14116, 19972, 39137, 40631, 18179, 22471, 45683,\n 19061, 28319, 23478, 4622, 42099, 22933, 43989, 5556, 5991,\n 8681, 18256, 29572, 34983, 45213, 42665, 43851, 2717, 32850,\n 18432, 31927, 25064, 9900, 25132, 32600, 28689, 30586, 10241,\n 27247, 10599, 43920, 34624, 33443, 44760, 31182, 2653, 39264,\n 12991, 41935, 12715, 44831, 30839, 27727, 23086, 10536, 36823,\n 24963, 35493, 29400, 25215, 17650, 25017, 5363, 18871, 38649,\n 13113, 9841, 35778, 2171, 12403, 39273, 18789, 33184, 7728,\n 18423, 29597, 32770, 14371, 18114, 17848, 951, 41873, 9612,\n 5875, 43567, 37332, 11974, 31957, 18642, 12777, 24700, 8994,\n 506, 45027, 7124, 16902, 40954, 35519, 7344, 35848, 46464,\n 11299, 3207, 32184, 27066, 5523, 34702, 27356, 46868, 3807,\n 11447, 29364, 7334, 41745, 22283, 36031, 41888, 46358, 42275,\n 20180, 13314, 46190, 8846, 22069, 25977, 33219, 45879, 5688,\n 8064, 33523, 33567, 4963, 42846, 23397, 36009, 35581, 7153,\n 31146, 35753, 6240, 43328, 42734, 17992, 41150, 37628, 45549,\n 24270, 8021, 37204, 3347, 39806, 70, 10018, 12968, 34819,\n 35945, 19498, 3094, 15242, 22416, 17003, 27825, 39677, 17271,\n 13640, 12892, 40683, 14031, 11718, 20993, 5205, 7338, 11695,\n 26823, 23630, 7778, 25962, 36467, 6605, 6113, 842, 14753,\n 41155, 4991, 26737, 23191, 40705, 13297, 45997, 17745, 322,\n 7711, 5976, 9286, 14444, 13636, 46924, 19211, 46197, 413,\n 46232, 23681, 15185, 21255, 12292, 20735, 11593, 39391, 13168,\n 36204, 7952, 6076, 19011, 27643, 20596, 37551, 19484, 42682,\n 18652, 46568, 40439, 46701, 45396, 7217, 21235, 6295, 23547,\n 29452, 26115, 27905, 2847, 32460, 21670, 13893, 9554, 38899,\n 29502, 45089, 43047, 26186, 19980, 45579, 33504, 29977, 25330,\n 17446, 23992, 43468, 9413, 18721, 22882, 45471, 36715, 24924,\n 23672, 13875, 40382, 9373, 16910, 36078, 25674, 45029, 26235,\n 14340, 18166, 43867, 46210, 17073, 4621, 41666, 20294, 19521,\n 46976, 14823, 38195, 20649, 3218, 34458, 37902, 25828, 29772,\n 9831, 26195, 38234, 15689, 29138, 36579, 20791, 4836, 25404,\n 13118, 25166, 16505, 44752, 13493, 14748, 42435, 38508, 14950,\n 37364, 10351, 13862, 804, 21571, 20695, 5610, 38486, 21766,\n 35534, 44279, 5177, 21207, 10372, 27819, 16978, 43931, 14970,\n 14473, 16603, 36026, 3698, 7219, 31041, 15949, 24156, 1056,\n 36952, 22995, 13545, 11946, 43077, 5571, 38580, 37356, 16626,\n 21288, 18236, 35458, 26864, 42398, 7187, 26919, 25960, 19872,\n 42443, 40006, 1934, 20483, 8221, 15573, 42007, 25844, 23786,\n 18155, 40426, 13916, 25408, 35733, 827, 9530, 4219, 12304,\n 46608, 34422, 41690, 29661, 4812, 19586, 12732, 3818, 42012,\n 35896, 29682, 35390, 43082, 36487, 18220, 22446, 20202, 45281,\n 
29908, 4737, 10576, 41537, 20041, 26228, 17863, 30526, 31004,\n 30063, 43365, 22090, 26852, 16801, 19520, 42020, 42802, 31474,\n 24558, 17701, 25821, 33869, 11823, 962, 28963, 4426, 1694,\n 36733, 25242, 914, 34904, 12779, 6525, 19749, 29512, 25398,\n 10878, 29532, 18187, 41519, 19844, 31098, 28434, 15764, 42053,\n 23054, 37166, 2360, 23894, 11608, 27299, 43254, 23452, 41647,\n 35122, 24897, 46430, 18577, 32129, 21534, 6671, 17969, 7175,\n 19718, 5739, 21605, 45216, 6510, 46664, 31842, 31092, 27835,\n 26395, 38979, 118, 36429, 4480, 460, 21257, 16607, 6058,\n 16924, 18510, 32699, 43079, 4865, 14712, 45323, 7253, 30944,\n 43308, 6198, 14274, 37716, 28500, 4669, 16232, 4334, 46366,\n 32297, 16843, 1466, 871, 8567, 43575, 24053, 5729, 17563,\n 10365, 36075, 910, 41755, 42694, 10552, 3053, 29069, 19141,\n 7477, 29611, 23322, 22684, 8288, 32152, 35675, 706, 37792,\n 41720, 37647, 1935, 27701, 43089, 32590, 23238, 27878, 19755,\n 38647, 12194, 24122, 2147, 25235, 27739, 25451, 5679, 13207,\n 12486, 45218, 17744, 18157, 35338, 3421, 20620, 29181, 34812,\n 42638, 10784, 39508, 25890, 41276, 17728, 45705, 46445, 39698,\n 28709, 10117, 19513, 3793, 24036, 44476, 7835, 23246, 18291,\n 8936, 31734, 29639, 10303, 34402, 477, 17094, 7497, 18819,\n 20140, 23134, 11302, 14175, 40854, 13660, 29546, 10958, 43243,\n 3411, 44271, 40235, 6284, 20304, 4069, 21287, 42116, 37130,\n 32961, 44577, 13293, 41686, 38566, 15182, 5576, 1818, 9020,\n 11392, 28310, 19166, 23390, 42160, 7006, 39165, 41543, 15444,\n 43589, 5918, 9276, 35328, 24819, 13376, 20299, 15735, 36745,\n 20591, 23712, 44988, 28937, 43863, 43019, 38467, 39920, 25212,\n 40348, 13235, 11495, 36386, 33758, 33112, 9306, 45391, 33647,\n 11735, 26293, 39666, 44877, 21031, 26811, 7508, 43116, 24655,\n 21447, 18444, 17243, 34143, 39451, 15362, 22150, 45828, 6268,\n 11612, 15775, 42514, 11393, 20750, 10834, 4402, 41765, 4264,\n 23594, 17277, 29526, 44615, 16759, 10605, 45521, 26904, 29804,\n 29797, 41769, 25440, 29042, 41611, 17815, 7879, 34621, 39190,\n 16864, 31858, 12231, 14627, 39437, 24789, 43159, 21890, 36527,\n 27458, 47006, 34815, 10908, 24609, 14848, 29681, 11366, 31886,\n 9801, 14570, 15207, 35446, 39696, 6598, 41273, 29029, 44251,\n 206, 41826, 21725, 39059, 28167, 23189, 20646, 30134, 4915,\n 24781, 11531, 36698, 34313, 20781, 46414, 38691, 14915, 25736,\n 9535, 23391, 1641, 27551, 39983, 42444, 41467, 4028, 3190,\n 32320, 39075, 14012, 9107, 14839, 1268, 37875, 5413, 35934,\n 16680, 30082, 17291, 486, 38755, 31381, 19103, 36126, 2981,\n 37944, 18041, 14801, 10209, 29229, 27001, 2944, 17777, 16370,\n 22934, 11611, 15339, 44853, 6101, 4075, 34527, 33593, 25335,\n 18899, 38990, 39767, 6420, 34982, 25983, 29618, 5494, 8009,\n 13357, 19347, 39151, 44184, 15218, 39614, 29020, 3942, 5742,\n 16099, 10050, 31037, 36901, 38658, 35936, 4378, 25853, 29046,\n 45178, 17235, 5230, 2998, 27322, 32836, 4381, 11728, 38502,\n 5197, 35165, 11551, 22063, 30970, 5683, 10569, 5033, 14546,\n 11660, 13471, 44067, 15515, 6103, 15796, 20926, 9019, 45421,\n 29222, 14941, 415, 17309, 39986, 17549, 46967, 9328, 29527,\n 8630, 33228, 4394, 41188, 35066, 3350, 30706, 28545, 41648,\n 41875, 21152, 31096, 46644, 12167, 45938, 39004, 45460, 45986,\n 3101, 7724, 26201, 4882, 42699, 16082, 34856, 12564, 5969,\n 38550, 22426, 28713, 38310, 23908, 28886, 29620, 13849, 12549,\n 1182, 21241, 41945, 10498, 35793, 31028, 28165, 31695, 45905,\n 45644, 40679, 1355, 14545, 11305, 13795, 19397, 10136, 15925,\n 33975, 41484, 17880, 27082, 25953, 41091, 14512, 30066, 22213,\n 40632, 25115, 
31964, 31391, 9954, 43916, 27083, 4858, 12314,\n 27976, 43593, 38989, 32435, 15651, 3564, 10737, 38443, 26563,\n 1307, 15756, 31218, 27446, 32343, 18934, 33286, 5273, 3934,\n 17001, 25520, 31271, 11024, 25312, 2646, 6999, 13083, 12709,\n 46032, 29998, 20754, 32893, 15381, 25201, 11989, 32024, 9564,\n 27253, 37521, 11896, 14388, 7767, 30056, 17429, 18611, 28372,\n 24030, 12453, 36002, 14667, 3730, 28023, 21981, 29756, 13011,\n 22676, 22358, 46010, 22919, 42509, 32225, 5503, 42955, 7348,\n 31759, 7210, 18085, 27685, 13530, 29743, 31076, 36033, 14648,\n 24350, 39574, 31985, 6015, 14672, 24113, 5956, 25606, 33232,\n 38015, 22850, 38707, 29637, 27953, 125, 34935, 20539, 1617,\n 3039, 42824, 17190, 19480, 18119, 40627, 38687, 32037, 31528,\n 25657, 46380, 12628, 666, 5123, 25536, 24097, 4238, 21927,\n 22876, 24826, 26568, 3073, 19641, 13342, 3041, 8708, 3898,\n 1963, 13823, 8236, 7026, 28142, 14154, 10171, 43213, 23145,\n 44920, 1276, 20492, 6505, 11365, 11679, 46031, 31595, 24600,\n 10806, 11947, 27558, 3782, 8247, 14521, 46384, 14951, 38781,\n 10345, 21620, 8044, 29311, 128, 17491, 38560, 13149, 40982,\n 39357, 24351, 38254, 16851, 33179, 1735, 14877, 6412, 11252,\n 1012, 14531, 44705, 28540, 2960, 34492, 44169, 19644, 44220,\n 46717, 21735, 12489, 32107, 45759, 22658, 21594, 35978, 42181,\n 22800, 13859, 36287, 29986, 2524, 41775, 25185, 19863, 23769,\n 44419, 7380, 10860, 20684, 12812, 43343, 17040, 35533, 22937,\n 3583, 5915, 18562, 40200, 15547, 44352, 10399, 13216, 6810,\n 35792, 2373, 13039, 23800, 8610, 14782, 24346, 24714, 28956,\n 36644, 15859, 39389, 15442, 358, 38866, 43656, 20679, 38186,\n 19137, 43014, 24201, 3181, 31963, 45740, 43910, 26788, 40,\n 19958, 9415, 37433, 30214, 738, 22562, 3979, 19680, 1848,\n 8875, 35169, 5943, 8272, 40812, 19601, 13764, 23937, 5315,\n 19079, 28025, 16646, 32371, 31581, 9961, 25794, 1315, 23661,\n 33972, 9484, 39010, 27712, 31387, 3204, 17089, 16469, 25112,\n 40170, 39686, 27462, 18914, 36665, 10771, 11648, 37002, 34339,\n 11139, 45086, 37473, 29268, 35898, 6094, 12052, 41568, 37506,\n 42923, 18686, 38105, 28295, 31813, 39359, 542, 41025, 15658,\n 39714, 47089, 11418, 33812, 21982, 26354, 24692, 18068, 15378,\n 754, 7235, 15855, 18222, 28877, 13381, 4712, 11474, 38233,\n 4337, 29382, 27341, 17576, 8265, 4539, 39931, 28044, 13903,\n 17796, 10085, 34475, 29588, 27338, 9301, 26138, 25179, 31793,\n 11322, 18573, 16855, 38474, 39089, 39855, 18498, 6986, 9209,\n 4714, 25809, 9971, 18966, 45588, 36456, 26871, 11977, 13871,\n 21418, 34326, 12119, 2662, 33564, 15448, 20563, 29304, 31582,\n 12910, 119, 5899, 6499, 10127, 2419, 4806, 31781, 22606,\n 22668, 30787, 31899, 4854, 26038, 13026, 4502, 37742, 17534,\n 39174, 22784, 37307, 32016, 31219, 21796, 15618, 1916, 33066,\n 9277, 48, 576, 25107, 7288, 11436, 23910, 10377, 18722,\n 41914, 37475, 2201, 34842, 8416, 6907, 31890, 28906, 11921,\n 864, 10329, 29524, 8670, 6600, 26447, 12429, 39248, 26688,\n 21664, 35939, 36216, 30742, 8679, 2272, 10693, 32658, 3988,\n 21149, 3541, 38127, 31322, 14309, 17181, 40573, 5724, 4094,\n 46512, 1891, 65, 20840, 40336, 25710, 19432, 45445, 42412,\n 14206, 30333, 30180, 44218, 43481, 16699, 28993, 10295, 6434,\n 44249, 36786, 37287, 10997, 4808, 5433, 11430, 41823, 19940,\n 20707, 32866, 18006, 1378, 37371, 41617, 1069, 3314, 32804,\n 28007, 35618, 18251, 43352, 46068, 43752, 10246, 16790, 12255,\n 18128, 850, 23852, 22590, 35365, 22467, 18047, 15156, 36129,\n 15198, 13098, 43821, 2168, 29341, 7659, 33685, 38019, 40810,\n 13834, 24403, 12276, 2304, 35053, 29634, 16512, 44328, 
22169,\n 45320, 20351, 28214, 25331, 43615, 12508, 17234, 35756, 10553,\n 14587, 18405, 16678, 32450, 20365, 28612, 3353, 31266, 22315,\n 31518, 31234, 13809, 37368, 29967, 621, 33389, 32192, 29873,\n 14221, 30758, 9235, 1303, 39641, 47119, 7662, 26759, 8852,\n 45496, 43067, 5074, 44324, 20611, 2942, 8544, 43991, 40537,\n 34596, 36842, 608, 23917, 10029, 11957, 34706, 22247, 11680,\n 26980, 566, 21054, 30412, 6289, 4227, 9926, 39422, 29200,\n 4499, 29711, 19300, 19391, 42821, 17034, 46817, 2834, 26565,\n 23541, 45972, 24889, 24960, 18639, 32270, 13291, 21392, 30627,\n 19146, 46142, 14827, 5948, 44709, 19575, 31419, 19867, 4221,\n 12832, 27352, 32185, 25072, 35214, 26172, 10638, 35436, 24626,\n 26879, 39002, 28878, 21414, 33011, 40105, 21088, 39350, 10968,\n 2486, 37372, 44425, 21265, 24812, 24689, 19670, 11506, 9175,\n 35085, 44408, 37157, 17665, 34597, 13941, 31973, 35009, 38198,\n 45743, 26023, 44129, 46211, 35696, 35033, 14258, 39563, 3121,\n 40231, 45174, 38936, 21815, 12820, 40328, 33865, 26991, 31400,\n 9671, 39050, 34850, 858, 28014, 30903, 8945, 35726, 1310,\n 25091, 24975, 47087, 6112, 35545, 12497, 13585, 21206, 22231,\n 62, 32264, 8443, 37069, 45106, 20316, 45791, 29023, 42541,\n 25591, 34048, 23643, 32395, 2471, 40817, 31487, 39126, 25386,\n 32722, 28638, 3167, 26449, 20577, 12581, 12199, 28453, 45236,\n 44187, 27349, 9470, 6532, 8873, 45876, 6214, 3545, 11157,\n 3870, 3227, 22230, 15504, 42911, 13142, 11075, 535, 20777,\n 14179, 25677, 41481, 14327, 14857, 14113, 5702, 16523, 18728,\n 22414, 28244, 44597, 2203, 6099, 39780, 7961, 46157, 33422,\n 21150, 42378, 26682, 21794, 24125, 22170, 16314, 38860, 6937,\n 15040, 40171, 14480, 34648, 37271, 38822, 31991, 15289, 22988,\n 29120, 26118, 35887, 33257, 37357, 18571, 22649, 310, 15898,\n 11484, 12895, 34304, 8740, 8828, 20773, 26741, 10517, 6174,\n 3937, 4344, 2482, 13058, 9574, 40813, 5131, 10178, 1478,\n 42189, 39375, 17738, 45649, 14347, 26461, 44686, 7116, 38053,\n 21940, 39867, 44740, 7732, 28106, 30837, 31443, 24370, 24740,\n 485, 32832, 2534, 2845, 29173, 19442, 44977, 34300, 37325,\n 34204, 39749, 429, 20461, 29440, 31661, 8829, 34964, 43177,\n 11962, 6062, 16034, 1150, 42816, 22609, 41929, 45774, 2612,\n 3789, 20214, 16870, 29547, 817, 20618, 4494, 30466, 823,\n 41980, 17146, 37150, 34483, 2118, 7061, 16271, 28804, 34647,\n 480, 34753, 6866, 38806, 19149, 8555, 7581, 42205, 14602,\n 11512, 14170, 22551, 7672, 27183, 10580, 42553, 21467, 29301,\n 45179, 5978, 3771, 9816, 20824, 29780, 38998, 2439, 23477,\n 28104, 23627, 1500, 45512, 6575, 23850, 13959, 29351, 9539,\n 13598, 17106, 31143, 23921, 7522, 4699, 809, 11757, 9292,\n 20536, 17149, 29704, 518, 17420, 40526, 28164, 18831, 36956,\n 26861, 10409, 43154, 29622, 18917, 25577, 44592, 25120, 12567,\n 23971, 28659, 18637, 45246, 37653, 20059, 40741, 45284, 24595,\n 17070, 16063, 22089, 46311, 47129, 37167, 11738, 43377, 17180,\n 34448, 34542, 42801, 9476, 33456, 47022, 34777, 26946, 4530,\n 15430, 29717, 6930, 34809, 35864, 7304, 33019, 2488, 25530,\n 37314, 13133, 34747, 25359, 23274, 3751, 4887, 13087, 931,\n 9485, 40482, 17283, 38448, 29031, 20559, 34079, 2841, 20089,\n 17953, 418, 38263, 43092, 27242, 45575, 45708, 6401, 18586,\n 28842, 18381, 4212, 14524, 4810, 37480, 41646, 3360, 17795,\n 42150, 39488, 32838, 4665, 35188, 19187, 47079, 2619, 42199,\n 46041, 3880, 19311, 26748, 40098, 46905, 33890, 3006, 2467,\n 22151, 23641, 32138, 19147, 13892, 16977, 3080, 729, 18255,\n 44060, 25307, 31508, 27740, 3591, 43607, 14703, 7108, 20783,\n 10299, 42304, 25951, 31955, 
35087, 3013, 42210, 23741, 4222,\n 42587, 31911, 3719, 5189, 39426, 35543, 27972, 23757, 958,\n 17083, 45872, 27742, 40876, 18049, 20740, 44833, 9558, 8761,\n 25090, 35591, 33156, 28345, 1426, 31816, 10228, 26684, 45264,\n 18895, 4500, 26369, 39615, 1120, 16556, 45529, 12428, 23074,\n 26926, 14130, 36863, 40157, 5078, 37991, 10889, 6011, 29134,\n 8492, 10111, 4439, 26176, 4234, 12433, 22354, 37868, 32294,\n 10982, 19046, 20006, 4750, 41086, 7286, 25430, 5261, 861,\n 15141, 4049, 35514, 37922, 1706, 36900, 44017, 41404, 42122,\n 27735, 25216, 24547, 10294, 2874, 2378, 6378, 36185, 35425,\n 12825, 4307, 13042, 7727, 26289, 21563, 21171, 11017, 46019,\n 24741, 3184, 35880, 15672, 29642, 9965, 44427, 18115, 43533,\n 25647, 7684, 27108, 11076, 44081, 42452, 38047, 46362, 10078,\n 19619, 16265, 8199, 12710, 35260, 29910, 34115, 25079, 35746,\n 44246, 27678, 32700, 35868, 44758, 23315, 3601, 32823, 18631,\n 46987, 37085, 39925, 5414, 7455, 6362, 17708, 22223, 32368,\n 35102, 13615, 44938, 899, 42427, 31405, 8275, 398, 33177,\n 36436, 21132, 363, 37053, 11992, 13171, 27392, 27497, 3734,\n 3487, 4246, 27008, 46102, 17750, 24981, 10658, 35910, 33416,\n 10575, 20491, 44011, 26077, 26601, 20489, 30563, 38933, 41825,\n 32678, 1838, 46402, 28206, 37389, 17532, 39083, 19478, 34348,\n 41615, 39619, 876, 34441, 15629, 24871, 33993, 31972, 8403,\n 9236, 27600, 34857, 22553, 33274, 10896, 9752, 22208, 25161,\n 10154, 33034, 44996, 20778, 37889, 17647, 42149, 44510, 12811,\n 5922, 17267, 20734, 34212, 20607, 36387, 12978, 42909, 30534,\n 6851, 10916, 8811, 32497, 23703, 5595, 11149, 37919, 23996,\n 34651, 33444, 10474, 10833, 10802, 2987, 30131, 23450, 44706,\n 37407, 41778, 10382, 33639, 9468, 843, 24195, 44587, 1143,\n 32007, 200, 27298, 35577, 41517, 5268, 25851, 26044, 42278,\n 22884, 13306, 14279, 28066, 569, 37018, 18074, 19181, 6500,\n 11387, 32023, 42607, 41079, 45046, 21279, 40236, 28855, 11193,\n 10610, 28534, 14176, 46198, 32218, 11121, 22960, 46618, 16974,\n 44645, 27469, 10940, 36088, 14984, 1914, 5307, 41574, 23481,\n 43144, 26975, 7856, 28795, 38683, 30609, 24284, 6052, 114,\n 10852, 13831, 27802, 41735, 45713, 33755, 8705, 24739, 36654,\n 10846, 46625, 26419, 17816, 39690, 15837, 12107, 35005, 14944,\n 36807, 5590, 9747, 16359, 24369, 19209, 36478, 30014, 8642,\n 20161, 35096, 4942, 23307, 23479, 7193, 46577, 42817, 32128,\n 41692, 2358, 30192, 17470, 2311, 33772, 0, 28722, 20885,\n 6140, 32321, 285, 14381, 2054, 16381, 36850, 16182, 4646,\n 38734, 27896, 44438, 28751, 21492, 10814, 39306, 32110, 45184,\n 16735, 26153, 26878, 21047, 13496, 19426, 29530, 45994, 44414,\n 583, 37385, 25715, 42246, 7356, 33915, 25155, 20992, 18660,\n 45842, 27236, 23563, 6673, 41354, 24851, 37654, 15929, 12027,\n 18102, 29032, 21818, 17909, 27927, 40147, 41205, 37574, 26750,\n 34715, 16681, 1146, 22156, 20860, 6428, 36658, 45555, 20271,\n 7251, 14133, 613, 13881, 7491, 5125, 4032, 27772, 43802,\n 17407, 39720, 25874, 18416, 40253, 11852, 29401, 40094, 28599,\n 27229, 39985, 24845, 3396, 3047, 32568, 25239, 21198, 10850,\n 14664, 45034, 1696, 3153, 44377, 31511, 45660, 40111, 8029,\n 20113, 1607, 46097, 26039, 43544, 31052, 6861, 46985, 20024,\n 14761, 32919, 24152, 7368, 1699, 16473, 42645, 42420, 10397,\n 20405, 7814, 20240, 12037, 36437, 37545, 6025, 46528, 35138,\n 40436, 23438, 11794, 18327, 14832, 12528, 40613, 23714, 27447,\n 5394, 3464, 8358, 28, 2522, 24915, 349, 38014, 9388,\n 6819, 10545, 38672, 46115, 39715, 2457, 8823, 25915, 39021,\n 33635, 9981, 38664, 37454, 2278, 36804, 20981, 25342, 30058,\n 
27087, 32286, 41598, 31898, 28271, 41678, 36269, 4857, 13477,\n 9195, 17852, 9584, 2780, 21941, 44070, 7269, 28231, 22651,\n 33576, 8124, 5725, 1559, 35551, 12519, 15880, 5651, 45764,\n 24818, 17619, 40088, 24513, 11271, 19516, 33496, 5813, 12456,\n 27564, 40070, 13682, 221, 33498, 32363, 42809, 19237, 39652,\n 29984, 1962, 6633, 34083, 18059, 220, 15850, 31464, 27355,\n 38183, 33527, 41238, 45505, 35465, 3510, 24459, 8557, 42014,\n 19472, 8647, 39832, 40187, 39388, 13953, 20237, 23464, 38207,\n 30055, 36203, 16774, 1080, 25039, 41845, 6321, 41988, 25188,\n 19321, 44866, 30402, 20434, 45564, 44321, 23882, 14208, 18282,\n 21868, 6935, 27440, 21304, 37859, 45144, 43571, 36763, 37438,\n 26219, 16530, 20157, 9486, 6764, 25218, 17691, 36930, 45794,\n 43458, 8889, 45998, 46364, 20029, 41584, 25482, 5711, 20016,\n 2961, 13845, 34726, 7561, 7021, 1713, 28243, 41479, 23378,\n 29048, 40518, 25309, 34343, 3509, 28140, 28518, 3669, 10445,\n 27330, 37594, 45551, 11869, 11032, 22851, 43824, 26908, 9450,\n 28649, 30500, 30633, 45717, 38642, 6941, 1279, 22885, 20841,\n 8506, 16425, 25127, 38305, 4792, 21384, 44201, 32947, 17088,\n 36038, 28406, 34288, 30539, 26912, 18154, 12758, 47105, 10770,\n 10434, 21305, 39348, 12612, 26262, 15772, 45812, 24909, 2698,\n 9978, 32300, 43207, 39175, 6832, 43661, 11170, 33094, 3757,\n 21675, 8521, 9832, 40016, 11902, 4784, 31847, 41728, 22944,\n 7445, 494, 8688, 11324, 31446, 20415, 12646, 40263, 21951,\n 12885, 1174, 15656, 40069, 935, 42724, 23470, 7417, 10923,\n 41730, 19394, 23798, 39309, 25049, 19673, 24890, 345, 38752,\n 796, 13310, 21944, 32323, 1885, 42454, 33525, 28112, 1839,\n 33589, 21456, 8778, 11861, 1745, 30518, 1599, 20782, 121,\n 20946, 14375, 14, 7103, 22317, 43045, 34498, 19000, 46448,\n 7151, 33944, 21018, 29228, 44341, 37613, 2744, 4318, 37140,\n 32083, 28762, 45805, 38272, 32462, 14234, 13214, 11646, 39360,\n 45857, 31996, 9976, 45737, 21778, 2433, 40449, 44734, 11545,\n 15103, 37740, 10424, 9756, 13145, 6879, 35564, 17494, 37746,\n 6359, 1185, 43638, 5788, 1301, 38702, 2537, 24116, 36794,\n 23834, 1947, 22280, 42959, 8097, 10632, 40575, 6226, 14548,\n 25585, 1452, 21452, 38579, 45924, 13416, 9620, 3768, 10537,\n 30659, 4895, 33572, 17555, 25945, 2641, 29630, 36647, 22871,\n 30646, 25280, 28290, 12726, 31548, 6349, 29393, 6104, 33142,\n 46499, 40212, 8651, 1363, 37443, 37911, 17994, 46443, 41901,\n 23222, 43131, 39394, 8699, 19839, 27243, 6437, 28031, 5847,\n 37026, 17998, 38863, 44212, 6050, 24279, 33289, 412, 19804,\n 35926, 4367, 34639, 32271, 8961, 184, 18749, 46095, 38718,\n 31871, 16717, 32327, 17808, 31185, 35157, 14562, 31320, 30108,\n 6796, 24693, 26881, 2215, 28767, 31570, 26729, 2183, 43025,\n 19, 2932, 39465, 1862, 19425, 1900, 29677, 15212, 18300,\n 27262, 1861, 4191, 33423, 23512, 8578, 881, 20325, 46126,\n 16171, 24529, 8311, 26161, 33200, 41289, 37277, 28587, 24593,\n 2029, 15451, 42226, 9443, 38255, 10298, 14005, 26791, 42798,\n 25862, 2750, 40107, 45286, 41146, 33923, 26402, 6514, 32741,\n 24964, 32985, 23654, 1792, 7624, 37964, 44330, 10258, 32096,\n 16800, 13917, 20287, 30637, 720, 952, 1169, 26669, 7394,\n 36079, 4745, 29951, 9412, 39184, 28793, 37487, 33889, 3222,\n 10065, 40361, 40034, 11591, 11630, 31507, 41910, 13590, 7133,\n 1945, 16822, 37468, 19090, 9193, 8938, 35042, 19898, 5421,\n 20832, 38170, 18672, 18552, 30357, 7495, 2823, 43635, 30538,\n 47034, 23048, 29360, 35496, 37323, 9346, 7597, 31207, 42410,\n 45874, 8294, 13194, 21525, 3711, 10210, 27645, 12499, 36240,\n 15497, 4654, 29420, 3223, 18907, 18599, 844, 2082, 
66755, 12931, 14656, 41595, 58577, 13502, 65617,\n 1196, 32003, 26301, 17623, 53905, 3126, 46057, 25551, 26977,\n 34303, 21194, 59113, 45940, 16847, 41994, 2419, 53932, 43252,\n 16782, 44526, 68386, 66541, 53116, 10283, 21103, 3100, 10998,\n 36311, 55459, 63934, 12767, 10190, 31650, 23653, 7678, 49620,\n 43699, 64853, 45150, 1042, 54142, 36320, 48075, 11886, 36044,\n 40134, 806, 35643, 65828, 53195, 30334, 30772, 35558, 53043,\n 17977, 43231, 13048, 16408, 53154, 53489, 33206, 66184, 7747,\n 59125, 32074, 36832, 47229, 63716, 27325, 31625, 22654, 8411,\n 65557, 62545, 66109, 24223, 17090, 58920, 31120, 51588, 66873,\n 54922, 17899, 65041, 17665, 43731, 45576, 54866, 9848, 57296,\n 322, 62496, 68690, 42150, 14270, 44042, 40278, 244, 13028,\n 5819, 25048, 61022, 21757, 390, 12896, 65388, 20127, 20007,\n 26074, 27055, 20131, 9459, 2136, 11655, 57662, 16051, 16990,\n 35032, 3007, 69347, 23052, 22750, 51634, 6016, 66400, 9577,\n 900, 28484, 41385, 21167, 30764, 16445, 44538, 69593, 33262,\n 66212, 37699, 25332, 28803, 1726, 71115, 4855, 68918, 39380,\n 15521, 24141, 29231, 36250, 11819, 28138, 37160, 28192, 28582,\n 38927, 70323, 49577, 15899, 71283, 21707, 54936, 69202, 62681,\n 35673, 56394, 28129, 62110, 2815, 70202, 30197, 10046, 57997,\n 7790, 23142, 12433, 65136, 25870, 7551, 35418, 62434, 55640,\n 21962, 23162, 69763, 1801, 38542, 20302, 54879, 14616, 49861,\n 68241, 25249, 42468, 62214, 45334, 48385, 70941, 65872, 13292,\n 44492, 18783, 27816, 24631, 33893, 32529, 14698, 38439, 45758,\n 13537, 30128, 48974, 4472, 41222, 12729, 54130, 23212, 32849,\n 27657, 51432, 6544, 15664, 40825, 22067, 56961, 27658, 35677,\n 9050, 53308, 28444, 66240, 3147, 8578, 37667, 786, 66234,\n 1373, 42293, 70615, 56302, 67012, 46806, 21728, 18959, 9196,\n 31799, 57686, 29396, 64420, 49249, 49894, 15682, 22485, 51958,\n 58074, 29937, 28274, 34647, 69324, 5727, 65499, 21703, 54140,\n 58366, 16065, 55172, 24445, 69203, 66504, 19398, 27754, 20744,\n 61547, 63496, 30155, 66171, 63107, 19423, 38592, 60112, 65087,\n 4001, 19325, 29866, 30331, 2745, 31405, 67673, 49558, 66974,\n 5375, 221, 38177, 44583, 65851, 49099, 49866, 12897, 62157,\n 64721, 66569, 61503, 42980, 9007, 17505, 27317, 28262, 28899,\n 35781, 15630, 23844, 47626, 14438, 22809, 46844, 57178, 67494,\n 24787, 48334, 20986, 29697, 2721, 23624, 23595, 22256, 1895,\n 52793, 47710, 49469, 27763, 42848, 42947, 9404, 43498, 8927,\n 69365, 8237, 49302, 49341, 21947, 49899, 47560, 18378, 49869,\n 29333, 5759, 36195, 15497, 71230, 3090, 58120, 26170, 1963,\n 46823, 32478, 5830, 46175, 63302, 39019, 12010, 50278, 11815,\n 68209, 40477, 27892, 18508, 55925, 32036, 18836, 58388, 3933,\n 26637, 35895, 31388, 47565, 1182, 25276, 69056, 57397, 60267,\n 29625, 5452, 36698, 16010, 53258, 6714, 57337, 67070, 1367,\n 30145, 6733, 9637, 14665, 53349, 29927, 54913, 56895, 6830,\n 53744, 998, 37621, 51326, 18076, 31188, 61725, 34129, 61966,\n 67735, 21315, 61896, 10281, 63739, 7672, 63454, 69668, 56116,\n 66314, 37089, 52043, 10168, 42304, 46194, 42571, 14471, 36082,\n 67812, 37936, 55141, 356, 57461, 3792, 65962, 65663, 58371,\n 5393, 10948, 33428, 39225, 58311, 14232, 60884, 58856, 54117,\n 14662, 61727, 28939, 49121, 44673, 26312, 14792, 15436, 1826,\n 32704, 46716, 53703, 62361, 55616, 5667, 51419, 70870, 62035,\n 28270, 38938, 12069, 19530, 15860, 55608, 15197, 10736, 15621,\n 48386, 23789, 27924, 63441, 37173, 31216, 11076, 61099, 6865,\n 29532, 38215, 68482, 449, 49932, 52281, 14835, 3476, 42416,\n 44728, 46539, 20416, 20619, 57326, 56980, 36635, 5026, 51676,\n 6554, 
63963, 67647, 62571, 55284, 8779, 19077, 58635, 12137,\n 25000, 35370, 41698, 69487, 43789, 39627, 49381, 45609, 28925,\n 15299, 10264, 405, 20738, 67583, 57852, 17982, 29312, 284,\n 54360, 55272, 60045, 49609, 43069, 18005, 36525, 15439, 47900,\n 70381, 51130, 12482, 37360, 68138, 19692, 14187, 37597, 29963,\n 717, 22292, 29043, 24405, 15376, 18315, 43416, 59321, 22566,\n 3, 38282, 46052, 7072, 7395, 15413, 4265, 69861, 45219,\n 57054, 16062, 27114, 4769, 69989, 48952, 6061, 15963, 58640,\n 31285, 26864, 48002, 5660, 28024, 60025, 67326, 57115, 19202,\n 64985, 43350, 60790, 71384, 2264, 69677, 3866, 62155, 65167,\n 57431, 54725, 55061, 58176, 29320, 52115, 66447, 45703, 14239,\n 38328, 26621, 12633, 49179, 40804, 66695, 68868, 27518, 17230,\n 31800, 30167, 25245, 34769, 889, 43203, 38032, 48812, 8034,\n 60089, 24474, 68945, 30951, 45818, 6052, 17476, 44052, 2334,\n 29437, 34829, 2873, 37714, 16594, 26940, 52496, 45770, 7328,\n 66005, 25068, 22083, 50714, 41845, 41003, 31716, 24559, 8993,\n 21014, 45290, 40458, 10342, 59995, 26447, 54473, 37826, 67607,\n 60246, 38993, 56048, 58189, 15996, 37880, 55563, 40413, 32318,\n 42390, 7472, 42282, 28216, 39230, 27157, 8105, 4234, 62330,\n 3512, 42333, 36062, 30614, 7386, 59620, 55285, 61531, 37333,\n 21881, 33196, 57343, 66837, 41916, 4618, 58339, 59936, 6655,\n 39827, 64550, 3393, 6042, 23612, 40542, 37625, 39207, 45968,\n 53277, 61974, 36688, 8932, 52856, 39144, 46601, 20449, 45202,\n 4675, 70602, 22879, 63470, 48252, 8784, 50983, 19899, 29906,\n 33964, 48533, 46887, 1123, 40705, 31353, 28944, 52574, 18807,\n 64869, 37536, 57775, 4112, 68371, 49416, 66149, 22289, 49526,\n 67197, 27485, 20770, 53798, 23019, 10231, 41160, 45346, 41402,\n 54247, 46073, 52537, 19969, 12567, 34587, 33841, 16496, 35237,\n 48507, 51973, 12294, 28222, 26278, 67028, 12172, 2514, 34313,\n 41184, 52317, 65931, 44067, 70234, 52460, 9104, 63427, 14119,\n 42372, 56787, 9800, 20425, 63892, 49284, 61366, 64799, 17983,\n 61082, 36680, 56817, 53811, 44384, 32986, 48597, 3666, 64781,\n 24726, 41666, 17705, 29382, 49139, 52918, 21682, 40687, 64671,\n 68188, 32196, 42135, 25805, 378, 12569, 39294, 66373, 69877,\n 50351, 17078, 59921, 65537, 37824, 9215, 63915, 20796, 70062,\n 50732, 6992, 11705, 45878, 55763, 56028, 57543, 60664, 11377,\n 10625, 54971, 61713, 46489, 59947, 28494, 66261, 99, 49791,\n 31194, 38157, 41256, 27191, 56957, 50670, 12802, 44188, 42178,\n 58570, 57129, 67999, 26189, 53373, 37850, 68988, 28844, 7937,\n 11429, 26976, 64175, 44950, 4210, 67545, 38852, 57865, 58870,\n 14099, 54203, 46395, 5967, 57473, 30366, 54924, 18472, 52968,\n 40708, 68992, 62534, 58781, 8562, 70190, 23670, 71101, 26416,\n 65682, 16934, 38319, 40698, 23584, 57336, 23805, 66194, 20866,\n 30210, 64005, 51625, 37992, 71105, 19246, 21419, 51875, 29533,\n 64857, 66074, 29073, 6669, 911, 51682, 54039, 227, 44382,\n 68607, 68666, 2331, 20166, 14437, 24145, 48647, 9667, 64487,\n 10454, 31341, 52601, 7298, 64652, 29232, 14650, 11194, 43912,\n 59098, 55520, 23606, 27952, 27714, 46252, 52801, 29148, 40205,\n 14747, 48770, 41358, 383, 10109, 25734, 65318, 38715, 776,\n 21547, 18733, 17320, 41034, 28246, 20164, 48008, 27311, 17198,\n 12014, 42240, 4652, 43522, 49158, 3875, 57191, 11154, 31291,\n 54974, 2570, 909, 16211, 64433, 30061, 40117, 57263, 9151,\n 1395, 66536, 22351, 6311, 70688, 2614, 27478, 37290, 54917,\n 29797, 29662, 26957, 64738, 44505, 69058, 51604, 61384, 17269,\n 39871, 15730, 12454, 33395, 41726, 9430, 13981, 29909, 2279,\n 57734, 8303, 57218, 19147, 53882, 59440, 70937, 48222, 2656,\n 
55809, 43073, 9444, 65977, 48686, 16369, 59288, 20524, 22055,\n 60415, 70053, 57695, 60207, 5712, 63653, 36140, 46738, 8699,\n 15151, 18595, 7020, 32583, 1176, 25472, 35589, 50124, 64040,\n 54168, 70662, 45827, 20740, 69634, 50679, 25132, 64539, 6770,\n 63906, 12959, 55989, 6220, 47768, 58284, 52829, 6658, 1899,\n 9562, 24862, 67863, 5720, 14335, 17626, 13369, 9057, 70530,\n 9314, 38354, 53300, 10304, 62761, 16672, 51423, 22211, 63869,\n 30721, 5669, 49473, 54899, 64517, 11497, 67455, 6869, 65393,\n 9449, 60418, 35449, 13300, 49695, 43633, 39441, 56670, 33746,\n 5054, 22898, 28182, 59697, 71258, 63010, 7104, 4468, 61740,\n 17091, 53921, 59033, 17871, 5391, 16920, 1897, 69598, 56457,\n 67611, 17372, 53731, 50715, 32964, 53014, 56079, 28667, 12023,\n 31784, 43555, 16406, 64770, 17950, 40183, 52229, 46591, 6165,\n 14496, 42456, 69426, 32723, 65793, 3917, 38837, 68199, 41631,\n 18156, 32263, 27002, 65733, 14803, 2502, 48027, 40774, 6782,\n 58523, 49552, 57109, 38614, 49999, 68695, 7417, 21173, 49971,\n 57200, 55569, 17070, 61735, 50153, 66727, 13676, 68093, 25279,\n 64568, 58762, 7052, 25225, 8560, 15329, 11268, 16091, 54407,\n 661, 12746, 68645, 9394, 28824]), 'Q': array([ 7524, 32330, 22060, 17190, 6328, 10287, 4533, 14947, 14077,\n 20386, 33624, 23827, 20677, 26181, 11737, 6154, 30227, 27605,\n 24248, 16515, 4880, 22464, 36042, 7672, 31998, 23816, 17650,\n 12692, 30862, 36872, 23047, 10295, 32689, 10964, 982, 12766,\n 28401, 27239, 25394, 32143, 31590, 35043, 12121, 22327, 6327,\n 33542, 35735, 6785, 36806, 33605, 809, 21685, 21126, 30791,\n 12410, 5254, 28692, 29541, 30219, 26560, 31098, 13728, 1845,\n 6468, 23576, 28990, 36033, 16640, 5809, 25174, 18263, 13737,\n 5023, 991, 13588, 20818, 13343, 12629, 3154, 16979, 34897,\n 5593, 23308, 31688, 35939, 26713, 19635, 25538, 19642, 5566,\n 3337, 4743, 7711, 24165, 18777, 2679, 3245, 28731, 23177,\n 2317, 17435, 37182, 3738, 29460, 21314, 318, 26528, 34222,\n 13786, 2726, 19229, 15462, 8492, 8665, 27744, 11998, 13283,\n 8597, 5688, 20156, 29735, 32762, 28761, 32239, 9262, 9961,\n 7203, 1305, 31191, 30521, 4302, 28334, 21217, 17816, 10677,\n 34989, 11763, 7085, 5019, 30545, 10513, 25188, 28216, 11214,\n 19691, 13985, 21153, 3550, 22718, 19332, 12142, 25952, 25683,\n 28653, 35522, 6353, 3378, 6678, 13079, 15532, 21958, 32194,\n 1768, 26114, 20177, 12907, 6911, 11663, 37194, 13945, 37724,\n 18413, 20804, 36563, 17138, 23323, 5241, 17234, 12727, 21131,\n 19267, 8840, 23388, 7677, 7810, 20317, 9561, 35397, 24017,\n 26102, 18867, 8818, 15287, 23291, 14389, 12945, 8031, 11915,\n 3192, 21640, 4494, 16329, 13044, 28735, 828, 22754, 4692,\n 17786, 27738, 1277, 2829, 37331, 7000, 16749, 32485, 24673,\n 32089, 3160, 997, 26564, 4985, 2199, 26984, 23448, 25244,\n 31531, 4146, 18349, 19429, 32576, 2930, 20540, 35609, 35818,\n 477, 35334, 5889, 27120, 29738, 5051, 4131, 23541, 13499,\n 15179, 21492, 31995, 24982, 1833, 371, 11836, 25084, 30362,\n 11703, 30052, 36487, 27702, 2612, 21768, 28959, 24542, 31916,\n 37153, 19089, 11322, 15564, 12899, 5722, 37327, 23214, 25479,\n 425, 31258, 30587, 6244, 22616, 116, 36120, 20928, 4876,\n 25747, 28022, 3408, 17758, 33962, 22850, 29377, 11281, 26556,\n 34007, 8029, 27374, 26930, 14971, 33176, 26782, 28746, 7342,\n 6253, 14793, 22685, 11753, 16853, 31525, 23598, 12897, 22798,\n 6418, 4561, 18076, 4206, 34090, 30141, 20110, 7073, 24886,\n 36862, 24000, 36364, 6308, 6919, 30517, 16981, 33646, 16570,\n 17865, 37620, 19569, 11326, 4903, 4657, 1348, 31505, 4882,\n 15500, 22378, 8451, 21436, 28116, 11399, 15981, 30055, 
22473,\n 17591, 19920, 27887, 19655, 17819, 11091, 19972, 21080, 16903,\n 2580, 22526, 12681, 25235, 19305, 33434, 35365, 22755, 32180,\n 23527, 29811, 1298, 18937, 35894, 24821, 29413, 29201, 17613,\n 19560, 5446, 26484, 19574, 7930, 7269, 6830, 22619, 17559,\n 7253, 18562, 10588, 11614, 36877, 19743, 10558, 30868, 36646,\n 24039, 10826, 2714, 18571, 31398, 36910, 22649, 10369, 7673,\n 11231, 1395, 28087, 18436, 417, 6902, 14651, 629, 21648,\n 7284, 1763, 29214, 6098, 26112, 11099, 20811, 8568, 7822,\n 32054, 33060, 29160, 36955, 11234, 2263, 19062, 18881, 5571,\n 19131, 9575, 11526, 5950, 34678, 6793, 37752, 21272, 28245,\n 13722, 1732, 34538, 28119, 22236, 3910, 2260, 5517, 31799,\n 392, 21961, 3392, 2344, 5248, 29957, 11386, 1962, 3335,\n 17471, 11405, 20439, 11419, 12356, 29894, 2584, 23269, 25866,\n 34377, 13286, 19461, 4366, 8576, 7722, 7614, 19496, 14593,\n 36677, 11254, 32966, 889, 9, 13066, 37361, 37673, 27314,\n 14435, 4162, 31778, 5437, 28049, 18678, 34022, 17505, 21714,\n 27491, 31323, 34734, 22400, 29164, 6514, 27894, 7062, 4987,\n 1402, 27776, 1567, 14504, 7367, 35995, 36858, 31597, 13778,\n 13280, 35339, 15276, 4072, 19586, 1322, 24451, 32457, 26985,\n 13376, 9416, 28121, 18004, 36349, 28201, 29655, 5685, 18106,\n 31020, 33594, 15328, 16664, 30320, 25460, 2007, 4122, 37564,\n 31053, 11871, 37289, 17677, 26096, 33368, 23185, 18958, 11599,\n 29550, 35873, 36795, 9703, 9155, 11957, 30438, 35942, 14275,\n 7450, 24508, 29434, 9136, 28368, 2805, 22301, 2196, 2453,\n 4039, 35096, 30185, 27715, 24288, 27967, 16203, 30864, 24333,\n 18849, 25309, 585, 10056, 1075, 10930, 1387, 4859, 17721,\n 13338, 24115, 22089, 1782, 19725, 4187, 7479, 32908, 36993,\n 7317, 27619, 8215, 13160, 20247, 34430, 7955, 18973, 16184,\n 30234, 34902, 35224, 8594, 2730, 2769, 1548, 33692, 15306,\n 12418, 22948, 14032, 1060, 26022, 20892, 21903, 29934, 28531,\n 2599, 28027, 37273, 29202, 19406, 5883, 36489, 37654, 678,\n 33430, 32852, 34739, 30964, 11204, 25705, 22819, 19790, 31694,\n 14338, 34965, 10668, 8863, 21527, 22912, 34817, 29649, 9312,\n 24221, 32851, 10855, 5190, 3090, 35227, 29196, 26999, 32063,\n 837, 12694, 14804, 33106, 33832, 20467, 10615, 3430, 6653,\n 805, 18040, 30718, 14000, 17127, 16952, 12374, 6386, 32111,\n 5497, 9340, 14917, 32881, 25110, 18778, 33820, 27922, 24454,\n 7649, 29701, 4863, 29124, 29896, 13647, 16201, 32311, 26612,\n 3912, 8281, 10624, 8005, 21082, 35259, 29218, 10086, 7950,\n 28675, 1327, 129, 20815, 4311, 16252, 1813, 22196, 7836,\n 26494, 24488, 20929, 17342, 5701, 10517, 30025, 18541, 33074,\n 5774, 29249, 15857, 35510, 19408, 5346, 25448, 6252, 24854,\n 15464, 6075, 21180, 24541, 18903, 7558, 10614, 5213, 29960,\n 9148, 29356, 29300, 10879, 16035, 31461, 31701, 30132, 22716,\n 29360, 16944, 15813, 20623, 4576, 22408, 30502, 28942, 14222,\n 17689, 31925, 26217, 4060, 35254, 5597, 4205, 21637, 6827,\n 2987, 35299, 29610, 7413, 17753, 13322, 25791, 5542, 26500,\n 33079, 8131, 19119, 37256, 24899, 22315, 11066, 26338, 36559,\n 18826, 3791, 23483, 15167, 37728, 28402, 8756, 37323, 37119,\n 31223, 23114, 10932, 19637, 3232, 15764, 12765, 4719, 11853,\n 20652, 13333, 24810, 24624, 15746, 19551, 22213, 27725, 25695,\n 35804, 16968, 10809, 14970, 22965, 8514, 14412, 9484, 27617,\n 25795, 34303, 5272, 5669, 13423, 16864, 34786, 30753, 34887,\n 15949, 7920, 7808, 7924, 18364, 7331, 3887, 20076, 400,\n 34116, 31613, 21226, 11454, 11343, 9114, 4574, 7684, 31639,\n 1873, 25075, 11002, 23466, 13808, 27550, 4552, 33425, 25626,\n 21162, 30317, 12106, 20821, 27945, 4413, 29076, 12611, 
19226,\n 4824, 9300, 16202, 37773, 6257, 12819, 4168, 15309, 18740,\n 23989, 21747, 7542, 11388, 32679, 6026, 14174, 19610, 9229,\n 2833, 11039, 6542, 33847, 35665, 27582, 515, 16400, 16005,\n 3965, 2217, 37531, 17882, 24398, 34987, 17243, 36685, 22826,\n 8795, 16217, 2684, 750, 16034, 20932, 12934, 3753, 34320,\n 16489, 10623, 7466, 3992, 11818, 34622, 12554, 18465, 15553,\n 14422, 17616, 14930, 8161, 22240, 12398, 7858, 1450, 22812,\n 3097, 16227, 30977, 31357, 12001, 33085, 11725, 26026, 20989,\n 28812, 20188, 18805, 81, 7535, 5001, 35122, 27189, 34274,\n 11692, 8527, 7830, 15402, 25617, 18608, 9915, 37651, 29347,\n 34033, 10965, 9288, 25754, 8512, 16558, 6965, 34123, 10400,\n 14806, 17608, 36754, 23621, 27188, 19019, 10248, 33960, 9227,\n 11034, 18833, 21477, 15108, 18754, 14794, 18565, 15172, 15311,\n 36246, 25484, 19836, 14446, 31259, 3935, 7644, 16199, 24173,\n 902, 33486, 18748, 14051, 6693, 25349, 6943, 8288, 2176,\n 32037, 1793, 12990, 37260, 8765, 33231, 14081, 8363, 35230,\n 29158, 5175, 36617, 16254, 29900, 26246, 13469, 6680, 10455,\n 4964, 7052, 18329, 5229, 31632, 13662, 27126, 16154, 22642,\n 36020, 8585, 37726, 2191, 26320, 7704, 7570, 34799, 26191,\n 13238, 29521, 36297, 6983, 10854, 30777, 36787, 6366, 22237,\n 6348, 13937, 15275, 11704, 12505, 11959, 20723, 22690, 30394,\n 27037, 2057, 36544, 5138, 12984, 2153, 27246, 33768, 32535,\n 23537, 4351, 34681, 11637, 32752, 2100, 3635, 30754, 8237,\n 8151, 1226, 21101, 29055, 19086, 37410, 5216, 1561, 31896,\n 7470, 24105, 31467, 26712, 3727, 37348, 21355, 34512, 28999,\n 25062, 36626, 23990, 25515, 487, 29023, 24443, 16739, 20906,\n 6797, 31691, 7239, 19944, 24281, 5969, 26878, 28474, 12889,\n 20391, 29515, 24575, 4244, 28426, 34948, 25377, 30373, 8647,\n 4814, 25059, 18293, 32756, 17780, 27472, 34473, 8285, 20388,\n 18619, 37158, 7936, 18340, 31535, 26281, 5057, 16471, 11941,\n 27624, 19011, 22661, 24153, 5028, 24666, 18292, 20468, 1687,\n 5981, 11353, 4504, 17927, 10598, 6136, 27591, 5510, 9531,\n 1333, 6355, 10551, 36794, 7271, 8027, 34878, 30082, 24184,\n 25466, 31567, 1258, 2108, 16973, 22069, 32225, 9680, 8657,\n 5309, 37632, 16920, 2183, 18566, 8227, 353, 37335, 28924,\n 7372, 25450, 23755, 21704, 46, 28443, 6628, 7590, 5083,\n 19873, 12987, 18889, 6082, 13519, 13294, 14017, 4889, 33100,\n 1924, 26350, 19859, 5582, 23042, 25553, 26444, 26314, 7235,\n 26411, 19971, 26337, 29388, 26172, 11122, 18750, 32878, 35337,\n 25975, 10750, 35781, 3802, 26100, 13193, 18538, 29114, 25373,\n 6416, 1356, 9821, 28936, 1854, 28736, 28192, 3462, 22107,\n 17902, 19051, 9277, 4597, 16610, 5977, 1005, 5803, 26185,\n 21325, 37042, 32052, 18857, 29058, 37665, 21896, 33781, 17563,\n 2464, 34747, 16855, 29512, 30480, 11249, 14972, 15065, 23019,\n 35177, 5228, 15391, 22957, 18414, 30110, 28446, 27739, 14418,\n 14755, 4321, 22562, 34057, 4612, 3674, 37304, 20344, 6716,\n 14091, 34220, 998, 25821, 32297, 35286, 34814, 36611, 37046,\n 36510, 33755, 22813, 1031, 6027, 33752, 25099, 32262, 17158,\n 27451, 890, 31541, 34395, 37434, 17033, 3118, 8571, 20627,\n 22507, 4723, 34726, 7076, 29545, 3409, 3404, 1139, 4130,\n 31401, 33506, 34840, 35178, 35937, 208, 28219, 20761, 20851,\n 21256, 22175, 9031, 6831, 8185, 13301, 29204, 16700, 15942,\n 30144, 11755, 22759, 34735, 23932, 33416, 22771, 13920, 31548,\n 24812, 19778, 3428, 6876, 29853, 27525, 4714, 17988, 8802,\n 22565, 28802, 22605, 15799, 15070, 7267, 15827, 28811, 35839,\n 14379, 30625, 5350, 706, 16026, 32200, 13419, 26853, 5808,\n 34991, 29614, 30160, 28509, 3031, 9129, 28379, 6245, 15922,\n 
8776, 33985, 16612, 23384, 27365, 3963, 18917, 16535, 32725,\n 28085, 12260, 6501, 11420, 15622, 27520, 10242, 14624, 37679,\n 7821, 28032, 7007, 15808, 24080, 26428, 3395, 31699, 5711,\n 27250, 28303, 35137, 17501, 19645, 12240, 35302, 6849, 19730,\n 4855, 29309, 16717, 21591, 5008, 33270, 23931, 33588, 22365,\n 37570, 11744, 24794, 5899, 7393, 31738, 5708, 4767, 19241,\n 37486, 33972, 23127, 26076, 7204, 29517, 14047, 16299, 30098,\n 18584, 26438, 6228, 24932, 34635, 19791, 16874, 26997, 2085,\n 4952, 27580, 4927, 5719, 20120, 24394, 10804, 16279, 6865,\n 13899, 4338, 4653, 26850, 33449, 32280, 13630, 27847, 6203,\n 30020, 27384, 27494, 19990, 5698, 20441, 26928, 18593, 28707,\n 24, 9080, 3894, 28745, 14878, 34939, 18472, 17997, 16214,\n 37113, 6229, 24402, 23614, 19046, 36876, 27571, 6459, 14563,\n 15515, 31062, 13070, 8957, 22942, 2556, 8584, 30519, 4542,\n 34968, 12468, 16055, 33485, 8661, 27260, 14139, 939, 15890,\n 9086, 13192, 24425, 3463, 10838, 16092, 33734, 37734, 33380,\n 12113, 12864, 36490, 8979, 17673, 1906, 25224, 13639, 32192,\n 8077, 20581, 35283, 25220, 13712, 28108, 32501, 12835, 17279,\n 3240, 33345, 36455, 18081, 20256, 32216, 24939, 22435, 13993,\n 18319, 7234, 25305, 6807, 15829, 3307, 21531, 21930, 31776,\n 22352, 34064, 37338, 35659, 891, 8983, 12502, 6758, 16764,\n 7153, 8835, 20799, 35717, 22877, 35810, 7574, 25785, 19748,\n 36553, 2595, 37405, 26702, 27874, 23529, 10050, 30771, 36792,\n 5268, 12862, 36210, 10013, 7777, 20068, 10082, 4232, 12028,\n 9683, 8400, 8336, 73, 24057, 27611, 27763, 31055, 17440,\n 37308, 35677, 35619, 8487, 12561, 37258, 31942, 30131, 7438,\n 3857, 19494, 2686, 7778, 3142, 787, 10819, 13307, 20262,\n 17887, 1173, 9559, 5207, 26926, 14466, 9503, 6205, 23587,\n 9376, 22325, 27761, 5585, 17135, 7248, 12135, 404, 36337,\n 36928, 34869, 6150, 28970, 2571, 20168, 11324, 22430, 23081,\n 34736, 27833, 29796, 11323, 11033, 18820, 29229, 22271, 19480,\n 20984, 22101, 31234, 19896, 28097, 30893, 11401, 35655, 7885,\n 13313, 6544, 2256, 29798, 22273, 15439, 21529, 20571, 9529,\n 13055, 13969, 214, 7295, 11643, 22136, 25989, 25642, 18020,\n 35153, 16594, 19177, 30074, 27279, 6337, 33204, 18582, 9279,\n 6180, 1657, 11750, 15673, 9689, 18812, 31831, 12820, 3908,\n 16880, 17541, 4894, 36838, 14391, 7565, 3966, 20738, 3741,\n 16659, 3305, 18327, 8440, 4658, 558, 31043, 2448, 189,\n 34409, 5035, 16538, 6858, 19968, 11842, 26426, 1961, 11940,\n 14125, 12052, 32834, 34045, 29505, 10239, 28397, 18476, 23139,\n 11028, 35724, 12789, 11044, 32516, 37188, 14890, 24957, 18570,\n 37611, 22548, 233, 25938, 14795, 34677, 23161, 1472, 34634,\n 35694, 9434, 30512, 11040, 28770, 10286, 28864, 630, 25658,\n 33648, 13347, 23856, 29126, 5836, 17293, 17371, 4369, 33301,\n 21255, 10278, 13525, 20172, 23218, 37237, 19170, 28522, 1556,\n 9311, 25355, 19706, 16785, 19544, 19589, 30420, 8347, 35997,\n 37661, 11104, 29839, 2501, 22016, 33570, 8608, 27687, 22635,\n 3620, 31771, 33190, 4616, 4059, 28668, 28529, 16209, 730,\n 34788, 31803, 6, 21846, 36098, 5866, 13127, 17981, 6671,\n 33517, 21448, 28874, 14155, 21869, 26714, 16607, 28684, 17509,\n 15709, 7209, 16037, 22653, 5973, 18298, 29557, 13235, 16882,\n 34188, 27596, 21468, 19057, 17519, 23641, 25950, 13457, 11685,\n 277, 18498, 15450, 35068, 21900, 17474, 6908, 5501, 15387,\n 21070, 35569, 5444, 30422, 10196, 32764, 27621, 11962, 35249,\n 20130, 4806, 35110, 31310, 35602, 33937, 9694, 15672, 24450,\n 25953, 6415, 19063, 25820, 3696, 4893, 4704, 16546, 12273,\n 16127, 22288, 34239, 36003, 17370, 37641, 5758, 28001, 1756,\n 
23574, 2637, 9093, 6601, 690, 37514, 2746, 12668, 24625,\n 8606, 9847, 8653, 9319, 35750, 23826, 2006, 20580, 26034,\n 2277, 26268, 31786, 25388, 21847, 13202, 3648, 7384, 11783,\n 4589, 29220, 37468, 29981, 28899, 23297, 56, 8858, 1478,\n 18425, 14735, 14215, 30137, 2852, 5115, 34215, 5757, 37701,\n 35719, 12480, 16365, 8443, 22393, 22643, 16998, 14621, 34661,\n 6042, 13935, 33116, 27729, 11981, 16984, 17645, 10097, 31775,\n 33887, 36220, 24909, 14303, 6254, 37020, 25132, 17443, 25955,\n 17623, 36599, 6398, 32199, 2435, 22773, 23792, 35006, 1189,\n 33386, 28214, 37029, 33216, 8218, 37163, 3828, 21593, 37698,\n 11145, 6099, 18771, 11031, 6188, 1685, 30640, 31156, 29800,\n 34502, 17761, 12464, 29262, 11406, 33509, 23901, 10919, 18983,\n 14121, 14722, 6482, 37264, 6582, 13190, 37753, 27831, 32332,\n 471, 35604, 11361, 19613, 34742, 21469, 28535, 33317, 8221,\n 31939, 35915, 15053, 26547, 20031, 18154, 18157, 24665, 35296,\n 2643, 24729, 25374, 5902, 1106, 8930, 5217, 18615, 3004,\n 17602, 9968, 31522, 895, 26287, 37365, 12094, 30592, 5919,\n 9908, 28383, 32581, 35022, 19923, 18102, 20780, 11136, 4145,\n 17130, 37610, 12419, 23172, 26732, 9755, 6821, 12027, 25944,\n 786, 30018, 3577, 15145, 36201, 24276, 9738, 7686, 20430,\n 28431, 3896, 27027, 13392, 11266, 22245, 34270, 17711, 33548,\n 29138, 36658, 1030, 2551, 30034, 4046, 26539, 13414, 17659,\n 6891, 31288, 27140, 16696, 36413, 32374, 26876, 15830, 16779,\n 11566, 1432, 1362, 9606, 24824, 27949, 3479, 20210, 14344,\n 26938, 35256, 23790, 6003, 36722, 17376, 20054, 9887, 34264,\n 181, 11351, 20414, 37271, 4138, 30701, 19367, 12548, 30904,\n 4777, 31322, 22509, 30823, 21134, 16905, 27618, 7775, 30453,\n 32930, 403, 3458, 24321, 15917, 26943, 3923, 12147, 5785,\n 6138, 37520, 29094, 1406, 34024, 29193, 6872, 33670, 36706,\n 6739, 18671, 25904, 7563, 20772, 7392, 10921, 24705, 27153,\n 37282, 28778, 117, 372, 35263, 24668, 2570, 23779, 27348,\n 6594, 9981, 26944, 1289, 22416, 11539, 11181, 17814, 12913,\n 25729, 12831, 10463, 18453, 31283, 11966, 13401, 19593, 27291,\n 18412, 37267, 9173, 17498, 12682, 5387, 9631, 10544, 31157,\n 1121, 37005, 13972, 16834, 28044, 9382, 29845, 27252, 24822,\n 8094, 21732, 25322, 28575, 30096, 34498, 14307, 11160, 3120,\n 14737, 18259, 30329, 26390, 31230, 26245, 25163, 12717, 6843,\n 16582, 35690, 37671, 23292, 26924, 145, 4266, 22878, 23199,\n 28868, 2458, 9865, 16113, 35537, 12529, 31641, 12155, 8397,\n 29552, 5564, 4687, 9377, 32872, 2280, 25191, 24345, 35738,\n 36667, 2863, 36183, 21596, 10914, 821, 25528, 10805, 37542,\n 1037, 14620, 30572, 27052, 10592, 34108, 8040, 24421, 24420,\n 36892, 21344, 33215, 2498, 27391, 5176, 27381, 26692, 5638,\n 20947, 34314, 12090, 20566, 1400, 7876, 104, 4799, 35106,\n 5834, 11147, 17073, 18459, 27012, 30400, 29922, 12296, 34706,\n 13444, 21262, 6256, 3311, 36757, 37013, 1156, 10184, 15507,\n 25143, 19704, 9071, 2087, 18967, 12474, 20023, 19109, 8779,\n 28872, 24933, 15399, 22340, 13224, 25542, 8980, 1006, 13018,\n 26345, 20113, 10244, 26068, 23394, 25730, 17525, 7807, 30332,\n 35260, 11094, 24246, 3134, 2809, 29653, 30036, 17129, 22780,\n 20480, 24191, 8687, 1066, 12184, 12057, 7106, 35584, 30952,\n 10317, 15858, 29805, 10900, 37045, 31571, 1773, 37047, 24643,\n 34362, 33905, 39, 25780, 32032, 4511, 33899, 14112, 18878,\n 27371, 4111, 16529, 6497, 11656, 6434, 25856, 7831, 15183,\n 12882, 7016, 28991, 37561, 35506, 4582, 16846, 7031, 11223,\n 32789, 34775, 14066, 15733, 23494, 30327, 5626, 31665, 23843,\n 26318, 12341, 478, 11834, 5530, 10162, 33508, 31050, 6701,\n 1796, 
5781, 2318, 30928, 23579, 16453, 23612, 10701, 28937,\n 37413, 3690, 24989, 23758, 25816, 24376, 9965, 9177, 33958,\n 26957, 20574, 2383, 20044, 14239, 15191, 35950, 32915, 21428,\n 2675, 20037, 4782, 26751, 36723, 3515, 13917, 32425, 5559,\n 28873, 34389, 1942, 17750, 23740, 7579, 16052, 28545, 37639,\n 36004, 17383, 9138, 23839, 15517, 13960, 14217, 1715, 33818,\n 37683, 36207, 3960, 16828, 15617, 22884, 36236, 35648, 34899,\n 31822, 27347, 33194, 22009, 18398, 6408, 5534, 11958, 34933,\n 13746, 8866, 33908, 20300, 28724, 5619, 25175, 15215, 4465,\n 4453, 29493, 15519, 16308, 29234, 33710, 29643, 11490, 15080,\n 33984, 9216, 28224, 17853, 643, 1495, 7115, 34491, 11796,\n 1446, 19558, 23638, 21000, 14401, 32696, 23707, 14416, 27053,\n 32943, 11279, 3563, 33848, 30687, 24759, 25701, 22209, 11192,\n 35200, 7832, 26137, 12714, 12189, 22466, 2324, 2388, 6899,\n 16895, 30957, 20305, 14425, 21198, 4496, 15252, 20471, 19948,\n 34490, 9045, 31911, 8560, 37796, 37792, 7083, 29035, 10573,\n 4419, 19360, 25392, 26133, 22746, 2565, 3711, 24120, 3497,\n 36332, 15590, 18680, 12445, 3716, 5830, 14997, 34906, 13939,\n 3517, 7151, 14352, 11015, 27843, 13139, 139, 37043, 4195,\n 11129, 21515, 24315, 6687, 37437, 30093, 23067, 7328, 24399,\n 28905, 23355, 13558, 29242, 8694, 23693, 33554, 20564, 10773,\n 14367, 27700, 30807, 5311, 26509, 3418, 35264, 14106, 26525,\n 19132, 9858, 36317, 16014, 18535, 4271, 1105, 28358, 1041,\n 32729, 20129, 30616, 11657, 1222, 24733, 3761, 32596, 33401,\n 1428, 36311, 28561, 19125, 14073, 24636, 31110, 22638, 30775,\n 36649, 36041, 17891, 7352, 10032, 27431, 36978, 6801, 12526,\n 23054, 4491, 36075, 9901, 32395, 28695, 33447, 20223, 25364,\n 6097, 6403, 16827, 26685, 2337, 26125, 6311, 32690, 17018,\n 27474, 3707, 34921, 14473, 11726, 18839, 30222, 13357, 24828,\n 6917, 22786, 2319, 34508, 34974, 36465, 495, 35336, 2271,\n 3847, 5060, 36323, 23022, 31439, 14093, 32315, 35009, 32695,\n 25660, 31631, 31510, 11476, 26988, 33546, 22567, 25605, 26942,\n 5342, 22369, 5847, 7458, 4399, 3, 28861, 18109, 31973,\n 21981, 29235, 21519, 7254, 28637, 22415, 35964, 10067, 11991,\n 16792, 21976, 13490, 23600, 28648, 12182, 2765, 5933, 26144,\n 29739, 17330, 6113, 34061, 15401, 10118, 33799, 9508, 28069,\n 2721, 12015, 32536, 28045, 24830, 24645, 25835, 11697, 27929,\n 37711, 20896, 1626, 7915, 5957, 35821, 20663, 32027, 11467,\n 1391, 35040, 34605, 18886, 16208, 8789, 15203, 2623, 19553,\n 22656, 27629, 23571, 32624, 14481, 30017, 596, 21324, 10384,\n 30122, 25291, 13894, 12417, 1253, 8508, 28574, 2055, 11870,\n 26375, 6198, 36552, 31898, 29594, 23007, 21235, 34391, 25703,\n 6507, 6486, 16372, 32544, 13512, 34464, 2694, 33995, 7690,\n 37199, 9701, 16802, 18568, 14910, 27337, 12164, 7447, 29962,\n 6938, 17117, 489, 35503, 10493, 5504, 20830, 31216, 20034,\n 3229, 16724, 9523, 17670, 1581, 31201, 8939, 15905, 7376,\n 13590, 37326, 18010, 36056, 28775, 32629, 29554, 19789, 36009,\n 9647, 35819, 5494, 13536, 36191, 37228, 17939, 22659, 18638,\n 36662, 1725, 10221, 15979, 25304, 17660, 21200, 1647, 13719,\n 26239, 21293, 13381, 14301, 15888, 27417, 20465, 18691, 18724,\n 5731, 9792, 35949, 2322, 7795, 17057, 5031, 16306, 26394,\n 5199, 14649, 16732, 8425, 10989, 5911, 9678, 8778, 18171,\n 28721, 29430, 33627, 6239, 8890, 11801, 31416, 1574, 12110,\n 20793, 20937, 20783, 8748, 27770, 15754, 12971, 20941, 16509,\n 16969, 15918, 26427, 19818, 30232, 37064, 20160, 21412, 5518,\n 28168, 8754, 15094, 22395, 29622, 31301, 15221, 6854, 27532,\n 33058, 19306, 929, 6945, 16809, 13221, 20339, 7608, 
21188,\n 22290, 16751, 31177, 30097, 1062, 29559, 4150, 15889, 9005,\n 25056, 15562, 21918, 11143, 34413, 35419, 34963, 21652, 6209,\n 29528, 6058, 21577, 14741, 28352, 17292, 28278, 9791, 20294,\n 37155, 14005, 18138, 14896, 20505, 5278, 2815, 36128, 17937,\n 5512, 10952, 7441, 25504, 6121, 2279, 14747, 36085, 33126,\n 22248, 34153, 12928, 36804, 4182, 5923, 34880, 6462, 5949,\n 33122, 6920, 31472, 30304, 13480, 14960, 9476, 11982, 28104,\n 2235, 22425, 1257, 1473, 689, 7562, 36175, 33598, 21550,\n 7220, 3873, 35541, 29730, 18136, 21016, 23247, 36068, 3055,\n 33583, 135, 17621, 5426, 33191, 36596, 37246, 19980, 34856,\n 13520, 17266, 35174, 30830, 18419, 33923, 25969, 3801, 27276,\n 32153, 26108, 7747, 2708, 22924, 23911, 20137, 9798, 20063,\n 26383, 10902, 15121, 28292, 33165, 1499, 33298, 30903, 20273,\n 28163, 24361, 10672, 12170, 171, 37092, 5244, 4034, 31107,\n 14100, 21872, 17172, 23338, 13142, 14189, 11421, 9010, 19196,\n 35707, 23514, 5628, 33687, 33034, 10236, 35350, 23867, 27439,\n 26309, 36609, 12149, 20543, 30562, 25857, 18120, 5873, 37235,\n 7347, 16144, 6267, 18249, 27085, 9729, 1455, 16013, 29529,\n 16155, 612, 21566, 14482, 11305, 7400, 13870, 23762, 13524,\n 32336, 11093, 16402, 20080, 20666, 22614, 1849, 30884, 11062,\n 14021, 31635, 24053, 37758, 19641, 27513, 31791, 28824, 10028,\n 12473, 16374, 20099, 1079, 17311, 13191, 27036, 23948, 33684,\n 34487, 18681, 17458, 16472, 35679, 17496, 30167, 5118, 17263,\n 23754, 27333, 5881, 6509, 10947, 31405, 2159, 6962, 33219,\n 12633, 37069, 8186, 7247, 30500, 15074, 13927, 34499, 32607,\n 35884, 14404, 20744, 5333, 12528, 13185, 20504, 10100, 4301,\n 8503, 14751, 11484, 3785, 19104, 37190, 6122, 14414, 2660,\n 12597, 16312, 7093, 24501, 36105, 30496, 21496, 21913, 13233,\n 35709, 19277, 6942, 35383, 27448, 19891, 37134, 34542, 12205,\n 29293, 4167, 15692, 31941, 37245, 25092, 34094, 23425, 24735,\n 2362, 12622, 37023, 17336, 7010, 21500, 4316, 20988, 13266,\n 11847, 1820, 17096, 12901, 28878, 14263, 2352, 4147, 8900,\n 18168, 17255, 22458, 31144, 23741, 13739, 1963, 14237, 17722,\n 10124, 9028, 1454, 8450, 33779, 10942, 8780, 1251, 9866,\n 29551, 20128, 4716, 1415, 18914, 36690, 23190, 16187, 33070,\n 19353, 18296, 9433, 27424, 21026, 14360, 18448, 15784, 8755,\n 25093, 37547, 25134, 8909, 18489, 28158, 7096, 2572, 12448,\n 33431, 21276, 37495, 12606, 14455, 6283, 16205, 24262, 13824,\n 25909, 23288, 13829, 22979, 33483, 37797, 28644, 5068, 37306,\n 8276, 1575, 17791, 21373, 35126, 6211, 5846, 24002, 10420,\n 16283, 11708, 2608, 9926, 1810, 37487, 6008, 7255, 28296,\n 37489, 13614, 29203, 199, 8261, 13113, 5828, 29705, 11939,\n 11774, 1493, 32582, 12078, 1113, 9845, 4518, 258, 11936,\n 9267, 12687, 9076, 30146, 13005, 18452, 14167, 7905, 4752,\n 25786, 21938, 22152, 14870, 29669, 20680, 4831, 14508, 20986,\n 33193, 35219, 7708, 34128, 18101, 11190, 10502, 37275, 27572,\n 37399, 16892, 11065, 36178, 3356, 21563, 24471, 36062, 13563,\n 24516, 30674, 34616, 22973, 35668, 10195, 19420, 35476, 8635,\n 4272, 33735, 33, 13796, 20715, 33577, 5641, 8421, 37206,\n 3058, 34429, 20385, 1199, 8683, 12786, 34248, 26753, 14449,\n 18146, 36933, 34316, 3399, 19686, 2193, 24703, 28863, 15155,\n 7351, 736, 28053, 28507, 34873, 23628, 27359, 11023, 25723,\n 9092, 15066, 18855, 15953, 28706, 1711, 37318, 28004, 14749,\n 604, 3024, 24802, 18234, 16524, 4728, 37392, 24466, 37543,\n 25916, 5553, 798, 22791, 18627, 29147, 17227, 28664, 24573,\n 33777, 5918, 21284, 8588, 36016, 23884, 1045, 29073, 34367,\n 10344, 17706, 15691, 650, 28208, 23474, 15906, 
34002, 18151,\n 1755, 1757, 9853, 29247, 14968, 29961, 11414, 8851, 21430,\n 19596, 800, 5667, 14292, 20635, 36541, 1273, 4315, 13846,\n 16087, 8204, 36602, 22822, 31582, 870, 24457, 22450, 35342,\n 10051, 30473, 35788, 13151, 35107, 15612, 16746, 31878, 34636,\n 10953, 6510, 20207, 22571, 29859, 15485, 5493, 20483, 31348,\n 9873, 26856, 7838, 11437, 9354, 3039, 6039, 7908, 12760,\n 21488, 25014, 1954, 3139, 6455, 30338, 5999, 16386, 25336,\n 1505, 29579, 8260, 23899, 36080, 2804, 20271, 9474, 31175,\n 32889, 154, 10281, 17499, 26867, 25696, 3977, 1351, 26748,\n 8697, 29690, 2617, 17488, 12253, 32307, 18062, 35186, 2212,\n 36339, 17355, 17365, 28941, 35353, 33947, 26158, 18454, 15651,\n 7026, 30208, 14823, 3211, 33411, 16125, 37567, 30892, 1098,\n 28819, 18549, 30186, 16845, 34056, 30103, 16138, 4268, 25228,\n 5130, 2759, 2673, 20069, 16993, 2359, 30094, 556, 35258,\n 25270, 26130, 2311, 1784, 15231, 6688, 27274, 21548, 35115,\n 1698, 19296, 92, 6520, 7641, 23674, 25396, 13803, 30215,\n 26012, 35728, 7828, 35512, 27724, 33145, 4666, 32769, 362,\n 12071, 34328, 23954, 30206, 33955, 17394, 36194, 20721, 24455,\n 28672, 19475, 15177, 2314, 26051, 16236, 18707, 33276, 19025,\n 18458, 6531, 33033, 12127, 31149, 12614, 17399, 33101, 33239,\n 10577, 17712, 27925, 8318, 14874, 20787, 8582, 30010, 32142,\n 23475, 21395, 26357, 27240, 17976, 18883, 26363, 21611, 31400,\n 13244, 18100, 16780, 8934, 31073, 29431, 23941, 34652, 16269,\n 25187, 2539, 9302, 5193, 30814, 144, 31868, 30336, 33290,\n 24001, 31937, 30140, 27065, 26644, 30189, 10973, 6720, 36443,\n 12872, 35348, 6158, 32495, 32994, 26180, 7444, 11901, 14683,\n 26507, 22131, 31844, 9722, 703, 7028, 25137, 6048, 6884,\n 22334, 37400, 20038, 32499, 4581, 26496, 11973, 16588, 23335,\n 13297, 12289, 25688, 3662, 17095, 7884, 4729, 23071, 10223,\n 12220, 26956, 13483, 21985, 29031, 5026, 12293, 1946, 31979,\n 11775, 32931, 14265, 15723, 15897, 26250, 1543, 37024, 6002,\n 30169, 1178, 30253, 10501, 1092, 866, 31343, 27743, 3640,\n 25496, 9745, 27946, 23267, 25189, 2291, 35208, 16297, 14489,\n 21842, 12678, 8641, 35847, 23241, 22225, 25977, 394, 13516,\n 10922, 32921, 28354, 35852, 8278, 28447, 37548, 7697, 10231,\n 9215, 27506, 28611, 28174, 15737, 14528, 12248, 14957, 24657,\n 5816, 37690, 17776, 298, 36886, 111, 4980, 7038, 37343,\n 365, 31354, 36557, 23273, 15671, 13261, 22449, 10653, 32310,\n 12246, 19993, 11920, 4013, 1678, 15601, 11394, 17572, 216,\n 27962, 20418, 16850, 30649, 8244, 12016, 37122, 28092, 35235,\n 5202, 782, 20416, 20265, 36296, 16652, 32016, 2692, 6791,\n 3494, 23392, 4161, 4765, 240, 18712, 26510, 4982, 4196,\n 8532, 27570, 863, 606, 6928, 29599, 9567, 9730, 35590,\n 35289, 4112, 18000, 22417, 34862, 28277, 35048, 20085, 19498,\n 11600, 14271, 32522, 8631, 28643, 28612, 1011, 16191, 4918,\n 21227, 14079, 16196, 3231, 22405, 36414, 2197, 30590, 32141,\n 21973, 1983, 33720, 2305, 1187, 24543, 4358, 18715, 5784,\n 33228, 1986, 4102, 20417, 28195, 24179, 10116, 5602, 15092,\n 13727, 27096, 31267, 33113, 29982, 7953, 36390, 852, 27914,\n 30427, 28857, 9452, 31952, 9733, 5452, 18789, 17367, 5341,\n 33323, 11488, 26646, 33809, 17288, 33159, 24452, 35542, 24019,\n 15778, 20169, 2628, 8277, 34206, 9341, 10686, 22010, 36021,\n 4960, 31667, 20193, 31470, 13710, 25616, 24035, 34164, 34764,\n 24630, 11173, 13699, 34475, 18952, 18743, 15637, 27490, 35634,\n 12171, 15984, 37524, 6677, 11680, 935, 30428, 17915, 12009,\n 7033, 35505, 34917, 34104, 31506, 26661, 28388, 23913, 18939,\n 23748, 19914, 21827, 36034, 31176, 6666, 13816, 12638, 
35732,\n 12929, 10535, 13577, 16776, 35806, 17361, 3862, 16151, 30442,\n 6566, 23517, 12238, 34154, 23806, 23682, 4219, 19921, 17911,\n 37359, 26128, 23771, 15295, 33061, 14848, 37186, 15364, 31379,\n 22846, 1422, 5673, 35667, 2499, 7232, 24592, 17053, 7793,\n 12562, 15272, 13153, 19054, 10443, 2224, 37508, 3778, 5713,\n 37232, 29459, 36655, 34466, 8152, 11782, 24472, 23607, 10726,\n 15268, 14358, 472, 9609, 26325, 17077, 971, 4364, 31337,\n 17968, 27701, 19779, 2456, 11086, 26526, 25417, 26033, 13406,\n 8767, 26493, 2838, 10877, 12579, 29033, 6458, 34824, 14599,\n 3750, 24343, 21579, 30697, 15354, 17409, 9655, 33395, 22296,\n 27534, 9403, 6512, 9956, 19380, 22932, 4471, 15043, 37676,\n 8617, 947, 16605, 10912, 29272, 19176, 36752, 15302, 36184,\n 29163, 1004, 17456, 23246, 12041, 22192, 5551, 19727, 28203,\n 23375, 34825, 37623, 14631, 35117, 2472, 14437, 19565, 136,\n 14476, 31067, 32568, 17555, 16625, 6505, 30711, 27421, 1929,\n 7071, 36556, 25509, 23500, 5049, 10024, 13336, 9969, 7448,\n 1896, 3952, 21003, 29429, 21366, 26826, 10626, 35044, 2568,\n 9380, 14790, 30511, 15853, 5096, 7185, 12787, 10291, 17217,\n 35803, 19171, 20953, 23426, 830, 20795, 24571, 35751, 33047,\n 36361, 35666, 3413, 20354, 37779, 36798, 628, 9315, 37775,\n 33282, 4515, 8855, 19456, 17624, 9448, 14568, 26731, 9116,\n 1805, 10016, 28628, 7421, 27177, 4246, 589, 8211, 22929,\n 12761, 20625, 6757, 28764, 20481, 36467, 32156, 5592, 17020,\n 13752, 33309, 13859, 4140, 6135, 31074, 5298, 1607, 20560,\n 16878, 25531, 18665, 25263, 20840, 2871, 25815, 24504, 29725,\n 1968, 17679, 37096, 33615, 30996, 34529, 1734, 19534, 3256,\n 8372, 23277, 11146, 25078, 17173, 28688, 8329, 34035, 16787,\n 13442, 23848, 22719, 9677, 37473, 36208, 16883, 12336, 6976,\n 88, 8052, 15625, 34886, 26170, 13599, 36613, 25664, 17271,\n 3332, 12517, 22516, 27344, 31039, 23440, 28741, 10374, 19770,\n 15396, 26903, 27042, 37078, 25502, 6178, 4255, 33953, 19804,\n 6397, 24423, 24081, 22109, 28054, 5570, 18397, 9346, 12329,\n 3824, 15756, 4671, 18576, 3814, 27623, 4238, 1117, 9861,\n 2416, 24332, 25921, 27539, 19463, 17427, 6802, 20497, 1421,\n 29136, 12785, 29072, 11500, 34557, 10301, 31555, 31271, 3983,\n 24857, 32350, 11921, 8467, 28139, 3906, 1461, 33108, 34245,\n 9011, 13910, 13253, 16999, 13535, 14692, 14346, 18787, 9074,\n 2562, 31443, 7373, 25170, 13831, 3958, 21379, 23811, 31374,\n 21781, 18112, 20631, 14231, 23056, 30626, 34008, 23160, 7760,\n 16861, 20367, 16361, 14868, 9085, 11767, 27518, 2671, 25914,\n 9951, 3435, 242, 433, 30469, 24750, 7155, 30339, 14797,\n 30204, 15442, 28464, 291, 14584, 32602, 32466, 34528, 14018,\n 30648, 26200, 13108, 24584, 7292, 34763, 15639, 6320, 5854,\n 9524, 7286, 1077, 3806, 19881, 13721, 2294, 24553, 13908,\n 13450, 25775, 19251, 33765, 33048, 22235, 31957, 34327, 34582,\n 21242, 17875, 17774, 31790, 12056, 30039, 13145, 29157, 13059,\n 32913, 23611, 6768, 36814, 31627, 8206, 5010, 34767, 25966,\n 12381, 30727, 12103, 1535, 26502, 24498, 11269, 7289, 34820,\n 30383, 17119, 6575, 27268, 24432, 1339, 19387, 4265, 9786,\n 36515, 30510, 20516, 2928, 24065, 7694, 3215, 140, 31152,\n 21750, 11112, 30360, 21730, 11272, 6414, 9419, 8490, 29516,\n 27913, 13158, 25185, 19021, 6918, 25812, 12312, 18676, 1615,\n 27910, 8056, 3191, 7952, 30689, 30696, 2801, 20431, 18432,\n 3218, 6061, 24879, 10860, 14070, 17850, 29450, 26538, 28106,\n 21588, 4297, 9344, 35540, 28560, 23721, 20072, 8509, 17901,\n 22595, 25559, 9571, 18392, 10249, 21807, 11457, 30136, 12700,\n 30392, 14374, 25520, 20589, 9806, 29494, 30750, 35784, 
14867,\n 13717, 30541, 34138, 32040, 32551, 6494, 19844, 2497, 34653,\n 27648, 36860, 27475, 27383, 16501, 25335, 31136, 22233, 17649,\n 29995, 20287, 14729, 3624, 10512, 17822, 10333, 20478, 10938,\n 29387, 21431, 14617, 23870, 30596, 3251, 8836, 7893, 11139,\n 31965, 31335, 32580, 944, 32067, 15992, 6168, 19020, 20956,\n 33719, 32843, 18176, 32533, 33267, 31782, 12949, 35080, 34992,\n 23644, 28968, 18783, 12195, 18007, 5011, 25216, 27600, 30668,\n 12574, 32061, 28253, 17952, 36269, 23590, 22368, 23507, 33754,\n 16031, 11411, 12172, 34884, 36788, 21206, 28064, 16991, 30474,\n 30172, 24384, 20021, 6842, 7469, 11261, 6296, 27094, 11229,\n 17929, 29382, 18163, 31154, 34624, 20577, 19582, 19135, 2372,\n 15173, 3138, 26892, 30325, 15789, 2547, 6828, 26940, 24555,\n 17956, 31289, 2001, 30454, 37501, 20726, 24468, 27636, 1380,\n 11312, 9917, 26629, 23254, 9718, 19680, 335, 14262, 19079,\n 7626, 24275, 22219, 31581, 23032, 19206, 35190, 20332, 4706,\n 10170, 13840, 25391, 14179, 15357, 7667, 8731, 23138, 16493,\n 10531, 27275, 30011, 22194, 3937, 7129, 3181, 7388, 19965,\n 27528, 14625, 8717, 2779, 19722, 10387, 8823, 41, 19783,\n 8737, 2419, 16902, 10770, 31517, 17326, 5041, 27800, 8225,\n 20436, 15586, 36131, 14153, 23659, 10009, 21962, 16836, 11403,\n 827, 26340, 31921, 17248, 23726, 36934, 6941, 17576, 2078,\n 9339, 3681, 5312, 25259, 17962, 457, 3799, 7412, 35008,\n 10929, 26848, 20508, 29025, 807, 11547, 29323, 6592, 25958,\n 33491, 2680, 14901, 8419, 33435, 34737, 25346, 35945, 9825,\n 32279, 36497, 194, 16894, 12532, 22885, 13259, 25959, 27951,\n 35559, 9682, 12734, 16795, 27857, 4222, 21559, 13769, 24745,\n 6352, 24639, 22847, 36251, 31083, 21540, 4800, 9478, 1515,\n 21716, 11289, 5249, 23457, 16611, 19072, 20306, 37768, 13225,\n 23903, 26225, 2685, 16259, 34092, 15171, 1670, 35484, 32978,\n 22628, 16195, 16391, 13459, 18779, 22023, 1624, 24440, 34889,\n 10784, 16469, 12598, 24684, 17094, 33040, 20946, 24985, 5383,\n 6364, 25699, 2850, 11815, 29428, 3088, 32354, 22694, 16565,\n 20337, 27731, 15427, 21516, 14808, 14381, 25456, 18613, 5942,\n 18602, 18526, 31080, 30551, 27393, 2502, 18297, 14055, 16294,\n 25071, 8749, 12251, 26617, 6016, 27157, 10742, 11113, 33038,\n 9417, 20103, 23533, 15199, 22898, 12310, 754, 24717, 28144,\n 27527, 5399, 5293, 5924, 20, 32905, 34302, 30194, 37355,\n 14961, 12852, 23759, 1518, 28326, 27267, 6762, 25868, 17703,\n 32532, 30440, 27134, 1960, 37060, 32283, 6116, 31511, 14660,\n 15082, 5665, 23976, 940, 37109, 33604, 20290, 23473, 18752,\n 36760, 6614, 37747, 5825, 13341, 33361, 25126, 19409, 15312,\n 28318, 23919, 1894, 32811, 9243, 10689, 3079, 24364, 1027,\n 26603, 5413, 8664, 33428, 4429, 25278, 12657, 10098, 24946,\n 9196, 24515, 23206, 32959, 10763, 31550, 17202, 2255, 7860,\n 33281, 7800, 2753, 27856, 10521, 4100, 27016, 24300, 36981,\n 34371, 11334, 24357, 14715, 24677, 7629, 22243, 20346, 24108,\n 26877, 18443, 8807, 36295, 30243, 9343, 14926, 19190, 5886,\n 2297, 1086, 9868, 3571, 29003, 3117, 34046, 24808, 13312,\n 5364, 24297, 24867, 9903, 24517, 32137, 29015, 21590, 35064,\n 1507, 19609, 12887, 9824, 28800, 13351, 23340, 23310, 5144,\n 31072, 26389, 14242, 35201, 7070, 30835, 22751, 20215, 9375,\n 28908, 14141, 36724, 11776, 34509, 16566, 31569, 29793, 11595,\n 37216, 32700, 35532, 1486, 33982, 10106, 12221, 7402, 5403,\n 25029, 19516, 162, 7162, 36942, 36639, 9641, 4972, 26247,\n 20859, 9110, 27508, 9295, 26915, 3736, 223, 15939, 2597,\n 12004, 14154, 280, 22014, 33248, 10549, 16654, 19503, 24878,\n 6134, 3823, 17180, 31767, 37178, 3959, 26111, 
33512, 32435,\n 23271, 19731, 9150, 32863, 27764, 24251, 11053, 4026, 30569,\n 10222, 30273, 32338, 24728, 33539, 299, 17561, 1001, 35021,\n 13922, 17294, 20834, 2652, 22484, 28129, 12215, 27141, 4944,\n 2845, 23004, 24163, 21197, 24372, 17302, 26913, 16478, 5917,\n 23157, 9253, 9022, 32734, 11079]), 'W': array([ 1049, 1348, 3327, 13596, 12728, 11198, 4735, 13801, 7439,\n 1877, 12989, 2200, 13172, 8187, 10062, 7514, 140, 4703,\n 11464, 9001, 1164, 5283, 4783, 6039, 2115, 10664, 5559,\n 5945, 9545, 280, 790, 12286, 9191, 9356, 8336, 696,\n 10357, 796, 14133, 6238, 12647, 3622, 8517, 4565, 2802,\n 14191, 1468, 13076, 230, 15340, 13213, 14641, 8573, 6243,\n 6023, 2811, 8787, 218, 12564, 349, 523, 12344, 15374,\n 10000, 10176, 12056, 2087, 6591, 7311, 12917, 1399, 14815,\n 9010, 14417, 13506, 9808, 15365, 15152, 12490, 4602, 645,\n 9209, 4848, 13536, 6859, 13353, 9497, 200, 13402, 10750,\n 11020, 4498, 7550, 6847, 1546, 10090, 3428, 13807, 8871,\n 9242, 9100, 631, 15326, 3455, 10262, 2615, 14709, 9129,\n 2229, 439, 2512, 14172, 8392, 10700, 14872, 14710, 11188,\n 4165, 6634, 9261, 11555, 4372, 2308, 11962, 1781, 8027,\n 3495, 3347, 8215, 3193, 1667, 4193, 12354, 14813, 6167,\n 12735, 13604, 12425, 5348, 6030, 13683, 2889, 3746, 8741,\n 4362, 1800, 1654, 12612, 15298, 9880, 7042, 3391, 13798,\n 12376, 7341, 4329, 5229, 6340, 9224, 11545, 4508, 15256,\n 8232, 5424, 6648, 8706, 3344, 11538, 6192, 10538, 3054,\n 13910, 9102, 10611, 5032, 3, 11524, 14302, 5427, 14084,\n 15224, 1033, 15104, 14122, 2030, 12111, 1031, 11760, 3866,\n 8619, 14707, 11568, 4162, 10472, 11342, 11995, 15100, 408,\n 13388, 12706, 3617, 11403, 3517, 9044, 5997, 9774, 15016,\n 3441, 11704, 5519, 10307, 10586, 11127, 11843, 13796, 7869,\n 5684, 3002, 14378, 15051, 8176, 12608, 11589, 7528, 4649,\n 13254, 2883, 13473, 12021, 207, 11827, 33, 13039, 11322,\n 1253, 3362, 3301, 9087, 9696, 13186, 4135, 7982, 4699,\n 12166, 10889, 5862, 9047, 11492, 9803, 6532, 14771, 5961,\n 6102, 9072, 2001, 6236, 11367, 6671, 14981, 10669, 14332,\n 4520, 13400, 7921, 5138, 8748, 2037, 7157, 102, 11055,\n 2585, 1271, 1440, 13138, 9814, 14528, 14472, 6589, 3350,\n 825, 4938, 14631, 6915, 13435, 8494, 10578, 14212, 1808,\n 12292, 3753, 7431, 6544, 10362, 2019, 2670, 7900, 10192,\n 7901, 4886, 2310, 14571, 6632, 10377, 14683, 15215, 5693,\n 4061, 43, 14624, 10649, 13679, 3849, 11343, 5828, 4315,\n 14988, 12389, 4003, 3736, 2915, 10540, 6769, 8236, 2836,\n 11661, 1795, 10133, 296, 13210, 2392, 8901, 4447, 6300,\n 5320, 7487, 6644, 3786, 4829, 4839, 6380, 2170, 1427,\n 6531, 14786, 14141, 9871, 9743, 14170, 14633, 1157, 12756,\n 1011, 5352, 3505, 7772, 9437, 2066, 2223, 2660, 13631,\n 13832, 9521, 3013, 13501, 2397, 7927, 3973, 11897, 10823,\n 8804, 1016, 6641, 1082, 8071, 4540, 7122, 922, 3200,\n 7756, 14579, 9355, 5311, 1815, 3533, 4616, 7539, 14076,\n 8431, 4777, 12692, 10506, 11441, 2732, 11957, 4521, 7015,\n 14967, 13681, 10380, 13169, 5884, 9451, 6715, 6024, 5091,\n 8138, 15281, 11310, 9456, 8005, 13825, 13843, 8412, 5988,\n 11256, 2254, 6852, 4889, 12116, 14613, 9877, 2378, 384,\n 5609, 4661, 10330, 9472, 14482, 14305, 1985, 11058, 9344,\n 5624, 14916, 9181, 7214, 4220, 9409, 5218, 4555, 2887,\n 5021, 10294, 4379, 7016, 3653, 14821, 15214, 7245, 6048,\n 13102, 3165, 6277, 7170, 13687, 8293, 14163, 8315, 15250,\n 10606, 15273, 15373, 7696, 9313, 12741, 12573, 6580, 10457,\n 14135, 9569, 10643, 11193, 464, 12560, 7575, 11997, 7257,\n 11458, 12718, 15070, 14011, 1680, 13381, 12621, 13152, 2058,\n 8506, 13725, 7560, 10345, 13236, 98, 10865, 1990, 
10692,\n 13209, 1805, 8813, 1711, 5531, 6067, 14767, 6982, 14186,\n 2242, 8320, 11924, 7187, 14789, 7089, 4401, 12230, 2241,\n 3187, 8666, 4802, 4199, 6327, 3628, 808, 4676, 3572,\n 1423, 7866, 12767, 10697, 3957, 11347, 7571, 1846, 2882,\n 9777, 15028, 10412, 3264, 5869, 14581, 4786, 12683, 14278,\n 11123, 8043, 15286, 1261, 11418, 12045, 6183, 9683, 13114,\n 4321, 8519, 2498, 12690, 10848, 6851, 8948, 3733, 3462,\n 12420, 13167, 279, 2608, 5604, 3741, 5492, 5836, 7647,\n 13547, 14493, 12579, 2065, 7001, 3892, 3524, 12806, 7398,\n 15139, 1150, 15309, 13185, 14071, 10650, 6863, 5779, 7907,\n 11832, 13371, 2251, 13555, 675, 3135, 12536, 836, 320,\n 10685, 6787, 4853, 1991, 7483, 3578, 5291, 11215, 10282,\n 14884, 9167, 3551, 12392, 5909, 1562, 9541, 565, 15294,\n 8113, 3185, 3607, 14194, 11439, 11970, 10448, 4412, 13962,\n 2137, 13273, 9973, 3186, 4473, 9292, 1994, 11992, 1437,\n 1832, 175, 12470, 1441, 5735, 5857, 12575, 153, 10794,\n 14698, 3815, 4760, 5800, 6421, 8927, 7469, 9660, 7437,\n 11652, 434, 10318, 2885, 12407, 15255, 13548, 3915, 8661,\n 3106, 4711, 14248, 8015, 12068, 8080, 10124, 12844, 6887,\n 7989, 9333, 2285, 2377, 12959, 372, 4091, 5924, 12698,\n 15240, 2165, 11791, 11290, 6381, 10311, 2483, 10997, 12581,\n 12367, 12018, 2798, 2277, 11651, 3314, 2791, 14463, 5279,\n 8369, 11205, 10128, 3609, 12658, 8189, 11113, 3791, 6020,\n 4677, 6408, 5662, 5161, 1220, 5842, 13761, 6237, 5518,\n 12839, 9852, 13106, 13054, 971, 9643, 7911, 2851, 4905,\n 8515, 3163, 3980, 809, 15012, 14987, 11152, 13466, 13868,\n 2171, 339, 5771, 8288, 11877, 11884, 5727, 7281, 11579,\n 10194, 11063, 5694, 9449, 5798, 4047, 1788, 1385, 8133,\n 2765, 1019, 680, 5533, 5250, 3843, 9962, 10171, 13376,\n 11086, 6521, 6608, 12693, 10333, 9982, 7861, 15096, 1053,\n 11501, 5802, 10537, 7568, 1640, 93, 6228, 5153, 5940,\n 8429, 13155, 12096, 13581, 2835, 2178, 7031, 4089, 11724,\n 2080, 5812, 4997, 1208, 13866, 6694, 9314, 5019, 4085,\n 11936, 9183, 143, 6310, 12696, 10075, 3363, 3926, 44,\n 12134, 5628, 8841, 14012, 13566, 3594, 10310, 168, 12257,\n 1519, 3254, 8123, 13749, 5956, 7288, 4492, 1878, 11682,\n 9049, 12466, 6768, 9639, 9082, 11061, 11980, 12089, 9601,\n 2701, 5898, 954, 3631, 7142, 6283, 12347, 3718, 5103,\n 12348, 3096, 8470, 11393, 485, 11022, 9886, 14599, 8608,\n 4266, 12539, 8504, 12378, 1254, 11351, 2327, 2005, 13,\n 4622, 13875, 10963, 4598, 4563, 8036, 8484, 12181, 14203,\n 540, 6590, 4249, 13926, 14854, 8638, 14356, 8135, 13305,\n 12924, 1910, 686, 14693, 8889, 9744, 15202, 3365, 823,\n 9086, 13268, 5377, 10713, 8134, 8366, 7330, 10785, 10173,\n 3180, 13275, 11037, 2654, 14969, 9850, 5131, 3024, 14532,\n 11305, 190, 73, 11260, 1895, 2923, 5383, 14418, 173,\n 11144, 14181, 5877, 9251, 9390, 10419, 1637, 2668, 5733,\n 11356, 14926, 1508, 15098, 8381, 817, 5413, 865, 14465,\n 13553, 6068, 15197, 9887, 11170, 14830, 11646, 15172, 14319,\n 7871, 1376, 7728, 13350, 14128, 11826, 6581, 2250, 11853,\n 13338, 7009, 5632, 8785, 12555, 2987, 6326, 4779, 9332,\n 9983, 6595, 14902, 3315, 5499, 4476, 13257, 10107, 5271,\n 2800, 12232, 13969, 6886, 13763, 2333, 6303, 4259, 265,\n 8468, 15320, 2340, 9323, 7957, 7892, 4884, 232, 1222,\n 1362, 2663, 4818, 11824, 14017, 2125, 15267, 2927, 9965,\n 3625, 12724, 13871, 12791, 5948, 11771, 6383, 1162, 8767,\n 13188, 12440, 4761, 4489, 4293, 5224, 13063, 7775, 3416,\n 869, 8866, 13967, 7256, 8557, 4664, 5755, 1505, 13452,\n 10603, 8538, 15382, 3004, 14583, 12451, 10901, 6389, 11610,\n 2070, 1726, 8563, 13956, 9889, 3401, 5745, 2833, 9297,\n 14502, 4887, 14196, 13720, 
11602, 5716, 313, 11488, 4900,\n 3005, 6782, 8363, 7585, 1481, 746, 5020, 6538, 1995,\n 902, 10036, 6763, 10686, 5791, 4632, 12321, 10303, 2363,\n 13728, 11859, 13902, 2043, 5360, 8365, 9763, 11044, 10193,\n 12241, 11532, 13951, 12109, 12037, 2233, 12494, 9276, 6708,\n 8355, 1831, 12315, 6147, 6129, 9826, 832, 2501, 12261,\n 894, 10527, 2465, 11952, 10616, 7737, 13672, 5159, 12060,\n 8646, 2669, 15164, 14202, 8283, 10983, 8456, 3231, 7260,\n 8912, 766, 8295, 6344, 898, 14273, 10496, 8155, 4274,\n 12115, 2544, 1021, 7030, 6833, 10202, 8153, 7093, 4055,\n 12393, 4211, 14456, 4493, 1708, 5524, 9724, 6835, 6621,\n 13237, 8756, 11625, 2914, 6419, 8420, 14909, 12064, 13193,\n 7396, 10123, 13354, 3396, 2942, 5973, 11229, 9632, 9228,\n 11444, 551, 11450, 14741, 1148, 10609, 10493, 10954, 11285,\n 4523, 9823, 10326, 6880, 112, 7457, 3151, 6206, 1580,\n 11150, 5010, 5746, 15377, 4639, 9175, 802, 9957, 5669,\n 9785, 3490, 7121, 8830, 682, 15274, 7259, 14602, 9873,\n 1129, 5154, 4299, 12085, 6455, 13036, 8532, 2192, 4950,\n 8793, 326, 12202, 2196, 2261, 13537, 5658, 8726, 3762,\n 9384, 6568, 4965, 12587, 9113, 8836, 4007, 3855, 8485,\n 12251, 8577, 10857, 14021, 12911, 4911, 13394, 29, 14389,\n 14263, 4714, 2287, 15182, 13907, 22, 5517, 8233, 11362,\n 253, 3276, 6617, 15262, 7862, 11879, 10239, 14281, 716,\n 8944, 2419, 6146, 8911, 15405, 11423, 5222, 8801, 5344,\n 10910, 9702, 8170, 6526, 4936, 14227, 13336, 7616, 9324,\n 13698, 9736, 393, 8664, 12898, 4324, 4229, 9436, 12105,\n 12017, 8268, 9218, 345, 8635, 5325, 9159, 10660, 7368,\n 14803, 12979, 3985, 4792, 7210, 11369, 12162, 9749, 15043,\n 14949, 9308, 11529, 5922, 14474, 2342, 7847, 12565, 11743,\n 4300, 12394, 8828, 9768, 5055, 3555, 1097, 1397, 7978,\n 14107, 6204, 4463, 5866, 8734, 4883, 9061, 5868, 9805,\n 3083, 12042, 10079, 3842, 8650, 636, 4134, 5192, 4548,\n 14529, 14996, 13332, 13497, 11141, 1389, 250, 8438, 1409,\n 387, 5878, 3074, 10337, 10772, 960, 3695, 15357, 7684,\n 9415, 15075, 15184, 8191, 11176, 6008, 11734, 13666, 4286,\n 5307, 12628, 9334, 981, 12506, 6916, 5935, 1675, 6874,\n 12500, 11739, 3515, 14288, 11525, 9649, 2290, 14894, 10110,\n 4149, 11248, 13494, 8591, 11479, 2362, 1728, 711, 332,\n 13288, 14903, 7308, 13932, 606, 12765, 15119, 6385, 11506,\n 2356, 9802, 4582, 11814, 10787, 12317, 15296, 8049, 13745,\n 12421, 6678, 4932, 12180, 11574, 4074, 12401, 2810, 1744,\n 12574, 657, 4819, 7622, 2848, 13922, 4292, 8310, 794,\n 1275, 5588, 4833, 9018, 4215, 6223, 14209, 13343, 8371,\n 12057, 9136, 1605, 11965, 3257, 3389, 881, 8388, 7049,\n 3157, 7299, 2325, 6933, 10145, 9773, 10668, 6919, 15108,\n 3251, 7573, 4603, 14622, 5351, 1118, 3229, 421, 13080,\n 13248, 11634, 3871, 3577, 12754, 133, 5150, 9055, 14184,\n 6967, 9215, 10808, 7232, 12000, 7263, 14056, 13091, 7906,\n 1205, 7719, 2293, 3895, 1835, 10449, 11615, 6677, 14964,\n 11395, 2641, 12964, 5584, 11893, 13299, 3023, 945, 1421,\n 5300, 11299, 13787, 1268, 5327, 11785, 5835, 7314, 2408,\n 9623, 14085, 9466, 7124, 12523, 5963, 4604, 303, 10170,\n 13156, 13588, 11754, 9731, 7280, 7980, 10894, 7120, 8243,\n 6780, 14787, 2399, 12763, 5615, 6331, 4143, 2010, 9021,\n 10348, 9201, 3470, 5921, 10949, 2956, 1921, 659, 2449,\n 11671, 2257, 13557, 14448, 9402, 12458, 5782, 679, 10573,\n 4809, 9004, 14335, 6502, 13496, 13322, 13339, 11775, 5542,\n 3643, 6640, 6854, 8101, 7612, 2122, 13132, 12211, 11662,\n 7667, 9912, 7802, 14082, 7023, 14618, 608, 9282, 2426,\n 1024, 10074, 428, 9217, 11551, 12812, 1587, 14029, 9554,\n 14292, 10613, 575, 4084, 8851, 9645, 15078, 818, 4248,\n 8628, 
2984, 3591, 11835, 9956, 10291, 9546, 10529, 10025,\n 14328, 6783, 10278, 7824, 5412, 12525, 8347, 3917, 9515,\n 11076, 14136, 1710, 11985, 4481, 2705, 3291, 1856, 6718,\n 15280, 14116, 13900, 6083, 5786, 7848, 11499, 8824, 2830,\n 7761, 8341, 12220, 11478, 13819, 11588, 3422, 15015, 6056,\n 5946, 11180, 12171, 592, 12123, 4522, 9531, 11737, 9414,\n 8192, 5947, 5833, 5809, 2549, 10926, 12719, 4359, 2517,\n 3449, 11558, 6571, 6947, 12977, 3360, 4121, 142, 6565,\n 5173, 13202, 2717, 4082, 2207, 10040, 530, 9916, 10682,\n 2289, 9753, 189, 4480, 5190, 3204, 5718, 14221, 3637,\n 10696, 10352, 150, 5763, 14737, 12851, 4813, 1628, 5369,\n 5248, 7484, 3125, 7929, 8526, 5186, 12764, 10617, 8298,\n 1548, 6405, 10063, 7530, 3244, 12760, 7808, 8331, 13743,\n 122, 135, 5553, 5575, 1631, 2587, 8093, 9169, 4750,\n 3595, 12403, 1507, 7140, 12055, 1142, 11318, 10190, 12792,\n 6462, 10758, 3137, 10738, 12946, 4381, 10622, 13542, 2161,\n 5104, 8097, 2736, 9304, 12772, 6864, 14345, 8660, 5174,\n 5508, 1194, 1086, 13903, 15230, 11406, 5309, 14744, 13147,\n 2492, 5864, 11293, 1810, 14123, 9986, 4018, 10485, 6586,\n 10026, 6042, 14930, 4432, 470, 1140, 13899, 7423, 7345,\n 4762, 6584, 12992, 3280, 4575, 1998, 5962, 8609, 3147,\n 10488, 978, 9419, 13486, 2247, 6639, 4038, 8931, 6554,\n 3065, 1803, 14318, 4035, 3996, 155, 8393, 10965, 9134,\n 11659, 931, 6754, 6559, 13360, 803, 5451, 1709, 11896,\n 1950, 5075, 4145, 1929, 12499, 4338, 2992, 6440, 11200,\n 11550, 4331, 6041, 8786, 13977, 14955, 427, 6553, 2270,\n 422, 617, 1250, 7258, 5685, 13012, 9734, 9502, 9867,\n 8663, 14958, 1282, 6548, 13717, 3519, 6979, 6659, 1511,\n 6012, 11280, 14275, 11314, 2994, 13158, 11578, 10093, 1241,\n 8098, 12651, 9670, 15190, 5109, 3166, 1248, 13447, 10322,\n 3256, 1635, 5156, 13431, 5965, 5977, 13115, 2586, 6397,\n 13768, 2906, 2718, 12874, 11709, 8696, 4611, 14396, 13599,\n 12015, 2312, 2412, 11800, 11668, 14262, 9719, 7230, 2739,\n 14290, 6104, 11060, 12727, 9923, 10588, 10653, 10670, 8821,\n 2962, 11104, 11920, 12413, 1243, 9832, 9812, 74, 14861,\n 2074, 11990, 13561, 6111, 1338, 4042, 466, 8094, 15346,\n 3669, 13727, 6097, 6314, 10363, 15236, 2035, 10385, 4970,\n 12558, 1722, 13089, 12700, 15201, 844, 7320, 5558, 7853,\n 4827, 3357, 7860, 10644, 10680, 14503, 4850, 324, 11594,\n 14577, 11637, 11309, 13945, 8541, 213, 7792, 9110, 5713,\n 4148, 12729, 15352, 1245, 14190, 13837, 11219, 1939, 12278,\n 9144, 11539, 4040, 15131, 10807, 10800, 11340, 762, 3115,\n 9603, 2757, 10153, 7081, 9035, 9838, 8389, 1175, 13805,\n 14889, 5523, 13262, 2479, 6324, 7284, 7114, 3007, 6904,\n 11095, 4719, 8416, 7758, 14380, 13583, 12013, 14700, 13508,\n 40, 9328, 12836, 9172, 8562, 11116, 5040, 2209, 8796,\n 1195, 11077, 10411, 10043, 6276, 10518, 587, 433, 1967,\n 9834, 23, 3554, 3897, 10187, 1543, 1556, 11581, 8126,\n 1837, 1823, 15121, 13369, 880, 12669, 6296, 14030, 1565,\n 2123, 11601, 2880, 5321, 5334, 13238, 3415, 14068, 9895,\n 10572, 12759, 9123, 11833, 14504, 4976, 12655, 7282, 6395,\n 12713, 2573, 12129, 1180, 11794, 10055, 13541, 5709, 2217,\n 13629, 4955, 8539, 11115, 3727, 7249, 1262, 14124, 9692,\n 1444, 11329, 10874, 12610, 2045, 2516, 12668, 753, 4855,\n 3503, 12464, 6401, 5394, 5064, 13579, 4360, 7721, 14798,\n 13420, 695, 12554, 14462, 15395, 12337, 8239, 15390, 8491,\n 7714, 9809, 11857, 14053, 11174, 3156, 8996, 2134, 14659,\n 4105, 3525, 15067, 4952, 6070, 12190, 424, 12032, 7446,\n 1337, 15122, 10991, 11906, 15143, 6334, 7303, 13989, 5568,\n 10979, 5087, 8747, 4250, 3821, 9573, 8988, 14187, 13124,\n 8018, 15004, 11729, 15058, 
10519, 4305, 11069, 14353, 2640,\n 13965, 14870, 6942, 5407, 10509, 7993, 6625, 13705, 3734,\n 70, 9212, 11298, 3939, 14214, 5765, 8064, 13710, 9704,\n 7186, 6095, 3316, 4612, 6323, 4374, 1089, 7967, 1799,\n 4753, 13078, 7056, 8188, 5163, 1043, 9756, 13738, 2525,\n 11379, 560, 12798, 188, 12883, 10593, 9287, 1237, 1095,\n 15038, 9827, 10832, 4943, 4179, 11321, 13642, 4106, 7599,\n 633, 9688, 4394, 925, 10143, 14446, 5526, 2723, 15423,\n 535, 7883, 2016, 6856, 13966, 9517, 2790, 12572, 4311,\n 1212, 15411, 4515, 8496, 477, 1826, 304, 6136, 1178,\n 10140, 161, 9732, 3954, 1361, 12119, 1069, 1372, 6089,\n 8773, 14921, 14373, 8799, 1771, 6297, 8378, 8443, 9227,\n 12748, 6999, 504, 2999, 13252, 12159, 9677, 4618, 7482,\n 4594, 6107, 6884, 12306, 6332, 12785, 454, 6663, 15398,\n 13660, 10479, 12664, 14764, 15138, 13177, 14268, 13535, 14593,\n 2278, 14506, 1256, 10086, 14148, 5801, 7162, 13688, 4138,\n 11380, 6971, 1668, 8598, 8323, 2919, 1721, 11891, 11341,\n 12169, 6392, 15379, 5593, 10121, 4219, 8947, 6364, 619,\n 4115, 3001, 12170, 3790, 9156, 4004, 4428, 12469, 15307,\n 10869, 12076, 6105, 10358, 5017, 3028, 12927, 6739, 1892,\n 3834, 13917, 10210, 5899, 8832, 14984, 3501, 14114, 4335,\n 9363, 9236, 6492, 10051, 12163, 10057, 4093, 101, 2838,\n 15422, 2985, 10984, 9593, 2982, 5211, 2140, 4209, 9302,\n 14347, 5987, 12682, 4859, 8722, 8226, 1897, 13066, 5551,\n 9613, 1433, 5613, 14412, 11143, 15278, 14914, 2191, 13678,\n 795, 11522, 10445, 3075, 9301, 14947, 4488, 10031, 5929,\n 6675, 2607, 712, 13378, 13853, 10361, 13503, 1806, 8725,\n 11323, 9003, 12775, 4340, 1445, 10936, 233, 10228, 1652,\n 15318, 6150, 12352, 8063, 9761, 11939, 9077, 10948, 2936,\n 14034, 1576, 10722, 307, 282, 3943, 5540, 15155, 3080,\n 2536, 4510, 260, 13505, 936, 8751, 5911, 3124, 2053,\n 6417, 14348, 2934, 6938, 292, 5698, 15394, 3920, 9273,\n 2336, 6691, 14956, 5468, 7835, 10830, 720, 3222, 9118,\n 3262, 11866, 2423, 14919, 2226, 5730, 13987, 15151, 13964,\n 5894, 9542, 15040, 12154, 11722, 1012, 12585, 3650, 3696,\n 7154, 517, 6827, 2623, 11377, 14022, 1405, 5985, 15244,\n 585, 12803, 5953, 3652, 1400, 10736, 1536, 7458, 8715,\n 243, 1636, 7749, 13495, 2978, 6279, 9839, 14381, 12897,\n 8594, 8884, 9788, 10543, 3479, 12087, 8426, 6004, 7788,\n 3898, 1234, 2151, 128, 4070, 12815, 7072, 2759, 9352,\n 5630, 6991, 9480, 7250, 7295, 3217, 6292, 11584, 7211,\n 8296, 1754, 258, 3670, 693, 5650, 11585, 283, 7650,\n 13606, 13639, 11257, 13382, 14792, 5471, 9288, 7773, 7774,\n 15177, 11001, 6798, 14834, 1211, 2783, 5697, 11603, 4597,\n 4685, 13165, 11975, 1431, 3118, 1957, 8068, 1669, 12319,\n 9469, 11088, 11860, 11783, 14494, 6701, 10390, 8151, 15183,\n 9816, 14660, 8164, 13194, 11376, 5392, 8617, 3367, 8339,\n 14321, 6245, 1335, 3155, 1583, 13645, 11421, 7, 13563,\n 2855, 5113, 13444, 8952, 741, 1494, 9322, 9204, 10355,\n 2976, 1343, 9188, 10281, 8345, 13485, 11918, 6945, 10635,\n 2644, 11359, 9365, 1311, 14485, 7651, 7931, 14126, 8587,\n 3648, 821, 609, 532, 14211, 1717, 6196, 6910, 10673,\n 2696, 10444, 9336, 3478, 3091, 1763, 13130, 10021, 8714,\n 9340, 11161, 1364, 4494, 12646, 6091, 9318, 9992, 12826,\n 7630, 11844, 872, 12776, 3893, 15211, 11967, 7629, 10676,\n 4336, 10203, 1171, 15369, 14369, 3485, 14410, 12517, 9030,\n 10672, 117, 6765, 5285, 3425, 4944, 7587, 15259, 11765,\n 4683, 15336, 2256, 3929, 5292, 6900, 4942, 4402, 12954,\n 798, 13760, 4435, 13650, 14637, 4465, 5683, 2384, 13918,\n 5379, 3434, 8008, 48, 6402, 5975, 8975, 7289, 13673,\n 1394, 13812, 10470, 800, 15146, 3883, 8965, 9205, 2781,\n 6487, 4001, 
2991, 9791, 1528, 3998, 7285, 13594, 14522,\n 1684, 5483, 11949, 5547, 849, 11459, 4554, 13973, 10135,\n 8872, 10523, 11254, 14769, 11989, 14208, 13126, 12206, 7054,\n 30, 13697, 5958, 12932, 12071, 357, 5681, 14860, 11007,\n 12006, 4163, 15007, 6735, 1540, 10763, 2410, 13532, 10498,\n 9837, 12978, 12351, 15275, 9750, 7036, 15207, 13684, 11332,\n 11057, 7660, 13821, 14516, 5136, 8437, 3725, 6078, 8692,\n 2000, 12710, 2893, 4967, 1309, 13261, 1393, 11206, 10629,\n 10268, 7704, 12204, 13117, 13813, 6061, 4151, 13269, 6320,\n 14322, 10340, 4167, 1881, 15008, 7377, 1533, 1560, 14512,\n 6451, 14843, 2801, 11036, 12299, 11898, 9553, 12993, 13870,\n 3540, 14941, 12329, 3108, 10072, 6260, 13920, 7269, 1917,\n 7736, 5737, 6767, 8271, 2754, 10938, 14334, 3082, 5938,\n 10084, 10312, 7668, 12541, 15135, 3297, 8968, 6474, 9131,\n 9714, 7533, 6454, 11587, 2541, 1769, 13792, 7083, 10781,\n 1658, 14896, 14729, 598, 13994, 92, 2452, 5187, 13071,\n 7532, 8087, 10410, 11190, 4507, 5456, 12814, 235, 2351,\n 12213, 8949, 13085, 11352, 5876, 5950, 11830, 82, 8258,\n 3393, 13620, 14621, 66, 9619, 15345, 2447, 7987, 9525,\n 2926, 5359, 10179, 8669, 5430, 12508, 14609, 7199, 2060,\n 1937, 3547, 4790, 4871, 3336, 7932, 3299, 9310, 3491,\n 8668, 9326, 5227, 7465, 13543, 7402, 7633, 13753, 1797,\n 3178, 14087, 3541, 10739, 10195, 6539, 5114, 12982, 5272,\n 6981, 14851, 4150, 458, 7805, 2949, 1369, 8081, 1561,\n 10510, 3825, 14664, 10288, 11213, 4571, 3113, 7493, 10798,\n 4782, 13584, 14254, 14416, 13570, 8359, 13232, 13057, 13802,\n 4836, 12734, 11598, 11173, 590, 6187, 14715, 4036, 2075,\n 5957, 3346, 12250, 7188, 12552, 3279, 1617, 7944, 9906,\n 14026, 14665, 2834, 8531, 5005, 2026, 13759, 10575, 12381,\n 10474, 13984, 10005, 3824, 14368, 6475, 11838, 2403, 12225,\n 9988, 10783, 8972, 6423, 13446, 9142, 2997, 2832, 13657,\n 4467, 1411, 5750, 7029, 13612, 1432, 10853, 3856, 8614,\n 9917, 8325, 4353, 9202, 1903, 2899, 9576, 1134, 11890,\n 4303, 4635, 8814, 5429, 14408, 129, 8658, 863, 1306,\n 9987, 13638, 8737, 1941, 14588, 1380, 7619, 1913, 5719,\n 13574, 13416, 1510, 11333, 14069, 1595, 14036, 5638, 11008,\n 11324, 9559, 6679, 12863, 14102, 13219, 1584, 4010, 10831,\n 11296, 13055, 8413, 3456, 6738, 8914, 1221, 15125, 12889,\n 13785, 12825, 1902, 1914, 8346, 9000, 7827, 11230, 10833,\n 1386, 1683, 13856, 3584, 8257, 10229, 12913, 13653, 8984,\n 2560, 12208, 15185, 14428, 2565, 12925, 3318, 15174, 5234,\n 6025, 14961, 12891, 3680, 8364, 425, 8900, 10520, 9862,\n 5511, 8707, 7875, 2606, 8880, 11120, 3806, 2664, 12146,\n 7596, 5789, 1113, 8475, 4000, 5739, 11281, 364, 611,\n 5262, 10508, 6969, 4946, 8307, 13200, 3452, 3633, 2131,\n 9011, 7888, 9622, 2119, 12373, 620, 7010, 9602, 10809,\n 10729, 4241, 11834, 354, 5326, 13940, 9807, 13522, 1055,\n 8543, 1123, 5090, 3472, 9335, 10790, 5742, 1176, 5035,\n 12259, 8966, 6803, 6549, 10151, 3742, 11679, 10728, 2806,\n 10801, 11411, 7193, 2295, 13869, 7253, 19, 11412, 1368,\n 6280, 7956, 543, 8326, 9560, 7283, 12600, 12140, 8940,\n 4519, 15372, 9556, 9695, 6619, 2805, 6003, 6713, 8177,\n 4060, 2527, 14108, 6669, 4377, 13509, 13627, 9450, 11224,\n 2933, 7521, 15158, 641, 12965, 10334, 5610, 9056, 5601,\n 14146, 11275, 4653, 11096, 3836, 9267, 7990, 1116, 13586,\n 7205, 6748, 4288, 2283, 14567, 7349, 1009, 11346, 7683,\n 5027, 2922, 6000, 12048, 7531, 7786, 1681, 13578, 801,\n 10955, 1697, 3792, 1030, 10500, 2530, 13045, 10703, 10371,\n 8262, 5393, 5256, 7455, 2311, 3226, 3882, 5772, 8144,\n 5856, 15295, 667, 7496, 10760, 9387, 4831, 14370, 4966,\n 5761, 8119, 14992, 3334, 980, 
11778, 10859, 12027, 12563,\n 13031, 15072, 3838, 2605, 11692, 1824, 13334, 4500, 7070,\n 6774, 7752, 6951, 6495, 441, 1890, 11520, 10542, 14760,\n 14179, 4785, 8699, 9672, 5529, 11327, 7143, 3644, 2109,\n 13249, 4862, 6522, 11087, 13708, 15077, 12460, 512, 14067,\n 4746, 11294, 11121, 8131, 15368, 2652, 13618, 13224, 7032,\n 8620, 12399, 3962, 14774, 5235, 8322, 7145, 9386, 1509,\n 14751, 6359, 7435, 11003, 5990, 805, 5328, 2662, 12860,\n 11388, 9108, 11917, 6605, 3382, 12738, 1755, 14591, 7236,\n 12816, 14650, 8904, 1213, 5009, 2831, 259, 10566, 3810,\n 2360, 7312, 10225, 2357, 1787, 8566, 5167, 3989, 3567,\n 1349, 4842, 11247, 14600, 9503, 10681, 9499, 4385, 8671,\n 14431, 103, 8235, 3890, 14304, 14868, 50, 6346, 13908,\n 3799, 14891, 418, 7131, 15397, 5310, 14078, 6654, 10688,\n 8690, 14142, 4296, 5916, 13220, 3214, 6248, 9792, 12783,\n 381, 14627, 15254, 5708, 14826, 518, 10597, 3564, 13851,\n 5094, 2358, 6484, 12050, 11742, 13911, 1184, 5887, 8640,\n 13246, 12504, 4436, 6785, 10654, 11795, 5117, 9075, 8551,\n 6775, 15385, 8048, 12196, 10111, 4692, 212, 13879, 4380,\n 13549, 14225, 12094, 420, 8335, 3568, 14632, 2568, 7296,\n 3158, 15243, 15285, 5774, 11617, 7799, 5627, 13515, 13833,\n 8834, 220, 10892, 445, 8476, 6054, 10888, 3337, 5396,\n 6459, 13062, 9958, 8645, 9937, 4240, 8278, 13331, 8169,\n 3377, 11749, 319, 498, 1027, 13692, 7922, 1502, 6867,\n 7706, 3041, 15333, 12328, 13292, 6335, 10947, 5367, 666,\n 12704, 3285, 12363, 14518, 12412, 13750, 14806, 7949, 14997,\n 1512, 10751, 4254, 7844, 13367, 2775, 14005, 8161, 487,\n 10628, 3673, 3964, 6812, 10524, 308, 11745, 14543, 184,\n 13110, 2103, 12365, 5431, 9033, 4504, 1716, 9439, 9393,\n 10053, 5474, 10224, 1974, 11595, 7648, 5249, 14101, 15391,\n 3267, 12966, 2841, 12324, 3860, 9607, 13264, 734, 8442,\n 15192, 4238, 13826, 297, 3880, 10602, 3981, 1616, 14652,\n 5690, 9550, 4073, 8085, 988, 2288, 6116, 1152, 1746,\n 6049, 899, 7000, 1354, 14350, 14291, 4276, 5510, 947,\n 14266, 14620, 10582, 4996, 5049, 1377, 11717, 14551, 10752,\n 12886, 6954, 4171, 238, 4826, 6162, 185, 11867, 4638,\n 3003, 3321, 5125, 4550, 12402, 739, 14365, 1020, 6968,\n 7506, 780, 11316, 8877, 6034, 7702, 867, 15042, 7544,\n 7689, 11675, 10161, 4358, 5281, 8818, 8387, 7880, 6081,\n 5505, 8768, 12712, 11750, 12307, 14433, 10443, 1649, 12686,\n 7395, 5507, 14070, 1375, 4979, 11319, 7261, 1287, 8377,\n 9720, 8951, 6839, 11971, 15110, 804, 15017, 11032, 2563,\n 10528, 1497, 11160, 976, 4234, 694, 7903, 12709, 8942,\n 3102, 14231, 834, 519, 6914, 13891, 7899, 6345, 14360,\n 14407, 8549, 15366, 4316, 6725, 1347, 14401, 4765, 12143,\n 6354, 13516, 2489, 14104, 9712, 12938, 3388, 2455, 1607,\n 11818, 11925, 1599, 9899, 6157, 12514, 8667, 8304, 11944,\n 1225, 11993, 1537, 12270, 12940, 11978, 6085, 12691, 9666,\n 4678, 10432, 5478, 12526, 15376, 1259, 11354, 9959, 5288,\n 9769, 13797, 13860, 3338, 1381, 15115, 8670, 11927, 5918,\n 4317, 3767, 3732, 9605, 605, 3261, 5333, 14043, 9759,\n 1987, 9031, 11665, 7293, 2843, 12003, 208, 10788, 13460,\n 373, 9487, 15270, 8367, 1216, 4332, 11607, 14180, 12799,\n 12167, 2198, 5058, 1324, 8319, 2513, 11626, 685, 285,\n 4066, 12330, 11375, 8181, 10085, 4099, 6543, 6653, 12622,\n 14177, 5034, 8259, 6477, 13822, 5252, 6499, 12887, 2703,\n 7820, 2554, 6084, 1688, 728, 14678, 9534, 2824, 6577,\n 4195, 3093, 1079, 3760, 5063, 12355, 8272, 3283, 5382,\n 1099, 10691, 11552, 12127, 4269, 2471, 2184, 11792, 9711,\n 626, 7640, 13212, 77, 983, 7077, 11158, 3250, 9262,\n 3162, 194, 6909, 14362, 12461, 9443, 10884, 444, 11107,\n 6450, 11484, 
5039, 3303, 3322, 2869, 5225, 13662, 4339,\n 8401, 8779, 8654, 8395, 14649, 7670, 1447, 12217, 6680,\n 10648, 1294, 10812, 7160, 15375, 8909, 14028, 1044, 6120,\n 5673, 616, 11991, 5201, 6960, 416, 11302, 7784, 14081,\n 7885, 8514, 10149, 15188, 9953, 14697, 14573, 1200, 4399,\n 12580, 1813, 10913, 5806, 2708, 8781, 219, 1725, 5158,\n 671, 8214, 13174, 11400, 4468, 13181, 4290, 6242, 12894,\n 2003, 2121, 7453, 14161, 6740, 5467, 10813, 9927, 4497,\n 4062, 12243, 10280, 1925, 9587, 11192, 9527, 3061, 15159,\n 5784, 2020, 12770, 4536, 15064, 5137, 12584, 3537, 14663,\n 4002, 11940, 2609, 1592, 3282, 12941, 4207, 13789, 692,\n 773, 12654, 12588, 7166, 13850, 11090, 13397, 15193, 10138,\n 5897, 6841, 4325, 11136, 2284, 3110, 8943, 8788, 15407,\n 1801, 8782, 14046, 13180, 14093, 6425, 7323, 2006, 4236,\n 9254, 1883, 14755, 13052, 8831, 10601, 3681, 3608, 5585,\n 130, 7507, 4590, 861, 7572, 8588, 10548, 9392, 4126,\n 6161, 8029, 6193, 11869, 5822, 7347, 6262, 8999, 12637,\n 8651, 2509, 13018, 2996, 5989, 8376, 12684, 120, 4172,\n 432, 2697, 14614, 2054, 3445, 10038, 9662, 10748, 5504,\n 3896, 13433, 8004, 13259, 8997, 10835, 9508, 13893, 9225,\n 7800, 1084, 8073, 3419, 12012, 7388, 6318, 12737, 12638,\n 1742, 14541, 10116, 13026, 1070, 11684, 6578, 7155, 7697,\n 7561, 7315, 4647, 1970, 13978, 2237, 8040, 7090, 6189,\n 14763, 13182, 3029, 11498, 4256, 12034, 8360, 6948, 6667,\n 9278, 10757, 2205, 13972, 9689, 8744, 7852, 14137, 1305,\n 6699, 15025, 76, 4663, 12545, 7061, 13159, 1506, 10231,\n 9065, 9040, 2872, 7218, 3874, 8229, 11802, 1743, 7526,\n 4613, 4710, 12377, 15055, 7172, 10645, 3443, 1346, 3051,\n 900, 12884, 7673, 15418, 13483, 9329, 13489, 9697, 7529,\n 13934, 962, 4796, 8797, 2315, 8687, 7666, 79, 1768,\n 7495, 10030, 961, 14159, 7272, 7776, 13067, 15403, 8065,\n 6006, 4849, 10230, 8145, 5450, 1265, 3575, 2204, 3016,\n 8194, 11655, 5340, 7046, 10966, 1934, 14171, 3496, 15308,\n 6550, 6714, 4449, 11947, 10834, 10293, 7863, 12240, 12850,\n 1873, 2202, 8597, 994, 3378, 13129, 9946, 10117, 6723,\n 2896, 6666, 14346, 14673, 172, 2089, 15203, 14244, 6121,\n 1620, 8783, 8778, 5688, 10988, 7449, 5762, 1238, 6865,\n 9725, 1330, 6496, 7087, 3127, 10169, 7147, 15059, 5135,\n 6872, 9068, 12274, 8718, 4322, 4348, 10406, 9872, 12743,\n 2380, 3676, 1492, 9865, 8279, 9500, 6964, 14545, 7126,\n 5378, 8771, 10626, 1627, 3740, 88, 2271, 14799, 7519,\n 6311, 13162, 722, 2029, 13456, 10512, 681, 4460, 12084,\n 10903, 1135, 1112, 9098, 8115, 1488, 4230, 1623, 2581,\n 1415, 10561, 11976, 9687, 9471, 13290, 5986, 9941, 8011,\n 7491, 4425, 11286, 4312, 231, 3097, 13713, 14134, 4268,\n 10880, 5951, 8333, 14423, 14047, 4333, 2177, 5374, 14883,\n 6482, 14372, 15234, 5454, 4852, 5043, 1066, 14242, 6429,\n 9979, 2262, 7705, 15303, 5230, 1812, 4961, 8536, 1038,\n 9841, 6418, 402, 11635, 4650, 10439, 9654, 3235, 6278,\n 6693, 1844, 10890, 5823, 3705, 12450, 7574, 11222, 10718,\n 6125, 15416, 3459, 6757, 8386, 7975, 1871, 6562, 9120,\n 14276, 11573, 4807, 7150, 7801, 14536, 2398, 75, 7270,\n 14817, 9903, 3520, 7138, 2575, 1181, 9351, 10476, 11179,\n 4749, 9179, 12659, 12423, 4496, 13330, 4101, 4361, 13111,\n 7726, 13121, 6338, 1673, 6601, 1843, 703, 12750, 7685,\n 14875, 9088, 13675, 6527, 8240, 13818, 11839, 7943, 2276,\n 12616, 5298, 8125, 1143, 7656, 13241, 6132, 8977, 9505,\n 8729, 104, 12193, 4567, 5570, 8488, 4929, 13560, 1723,\n 603, 8533, 7692, 12877, 1138, 5829, 11560, 4175, 7107,\n 13758, 2429, 13529, 13205, 2725, 4527, 8662, 14607, 11042,\n 3969, 5101, 4748, 10375, 14189, 2316, 11238, 14301, 11784,\n 7195, 
12472, 718, 14162, 9375, 2875, 11108, 4139, 6932,\n 1001, 1332, 1281, 11567, 10503, 11779, 11958, 3730, 1469,\n 4652, 10098, 567, 15396, 5336, 14933, 8222, 920, 7158,\n 6355, 7226, 14151, 11645, 11915, 13007, 13020, 4981, 10309,\n 3611, 6668, 11942, 1046, 14871, 4544, 11452, 15204, 11082,\n 12822, 10618, 13476, 7830, 1151, 457, 4637, 11025, 121,\n 3020, 13011, 5355, 11212, 342, 4054, 4985, 14836, 3671,\n 14863, 322, 14500, 5093, 3431, 2187, 923, 7414, 6336,\n 6138, 5932, 10244, 442, 9549, 11809, 14132, 1317, 13576,\n 5233, 15219, 15049, 3471, 9781, 13643, 2374, 10716, 6443,\n 5775, 14822, 12892, 6211, 3906, 14523, 15130, 8012, 4457,\n 4395, 310, 2779, 9381, 4011, 11973, 4291, 6144, 12676,\n 643, 14155, 1207, 9833, 4069, 8883, 12731, 353, 9629,\n 1953, 819, 11774, 12631, 11782, 13191, 9059, 9960, 13242,\n 8941, 3269, 1809, 1568, 14025, 7625, 12524, 3770, 3228,\n 5633, 10634, 3183, 12033, 8761, 417, 274, 1460, 7588,\n 3508, 6883, 2890, 4736, 10261, 9362, 4056, 4806, 12907,\n 2948, 2279, 9647, 4530, 7840, 4610, 375, 4814, 583,\n 8868, 4907, 10235, 3175, 11453, 6231, 7192, 10659, 9616,\n 6939, 4724, 4793, 10069, 5577, 15047, 13144, 4158, 2920,\n 2221, 13520, 2395, 11530, 755, 2593, 5888, 2746, 5341,\n 13894, 9002, 6898, 3701, 14207, 1527, 2874, 7952, 1626,\n 6252, 6490, 1247, 9796, 5459, 14816, 7470, 9291, 10683,\n 11681, 2959, 5560, 11253, 6082, 4569, 12147, 4723, 4583,\n 11140, 13070, 3832, 12784, 4737, 15317, 10904, 7467, 14960,\n 5479, 14444, 12529, 7399, 8174, 4146, 14439, 6087, 1850,\n 4893, 3043, 1661, 714, 1531, 13852, 13125, 9896, 4052,\n 1292, 3604, 10771, 3658, 7287, 3570, 3805, 10106, 12100,\n 4834, 5118, 2101, 4205, 1334, 6974, 10384, 5295, 7109,\n 13060, 778, 13565, 13059, 7079, 4631, 8862, 7605, 2291,\n 5304, 5172, 927, 3514, 9691, 5134, 3467, 8520, 7355,\n 2375, 9797, 6649, 11446, 13983, 3173, 6356, 3539, 5244,\n 6802, 12275, 10587, 1664, 2577, 6411, 7309, 9757, 10976,\n 12371, 10054, 570, 1392, 94, 5194, 11878, 10924, 2414,\n 10273, 2646, 4302, 14850, 4044, 4445, 8061, 5181, 1867,\n 3227, 6636, 5827, 7464, 9543, 178, 14945, 10306, 11194,\n 10850, 15003, 8674, 14064, 6400, 6564, 5982, 2172, 9146,\n 3081, 6980, 14796, 10902, 11801, 2401, 12314, 7874, 7473,\n 14761, 9609, 3198, 814, 993, 9058, 6212, 14106, 5196,\n 10873, 7322, 11489, 11903, 13195, 15175, 509, 12429, 1563,\n 8991, 3476, 8926, 14605, 13239, 5050, 13461, 4228, 9856,\n 8219, 12419, 9407, 1128, 8396, 1155, 5120, 8408, 14415,\n 11565, 3948, 4730, 1645, 15120, 6316, 9161, 11624, 4506,\n 1524, 6386, 12498, 452, 1794, 7442, 4026, 1296, 8079,\n 14671, 7118, 8575, 554, 6469, 2753, 5452, 5070, 8362,\n 13318, 4788, 12231, 438, 1889, 3672, 2208, 13006, 14962,\n 10583, 5210, 4451, 653, 2347, 9478, 3309, 11142, 14169,\n 14977, 10285, 7995, 11357, 1735, 1715, 4531, 5406, 12623,\n 12097, 11457, 6794, 10207, 1567, 13873, 9848, 13258, 2264,\n 203, 858, 12697, 12544, 246, 8108, 8072, 2582, 569,\n 2918, 10786, 3579, 6230, 3211, 1228, 386, 14509, 6154,\n 6504, 10019, 1467, 11304, 5217, 13341, 7646, 2611, 1571,\n 5056, 14670, 6131, 4086, 2868, 8735, 449, 1879, 4585,\n 12853, 1585, 14480, 14352, 14849, 2499, 1784, 5820, 8466,\n 5715, 3373, 3553, 3850, 6184, 4552, 11414, 904, 1464,\n 3869, 10839, 2330, 12301, 8410, 3990, 13898, 12025, 5046,\n 5834, 3566, 10607, 11218, 2645, 9389, 12962, 5155, 10777,\n 4454, 8252, 7525, 6825, 14066, 10435, 14704, 13803, 10856,\n 5850, 6816, 9610, 3516, 3098, 4027, 13235, 11372, 482,\n 9907, 8842, 8790, 10996, 298, 13904, 5682, 5105, 11845,\n 9970, 6501, 9027, 4450, 4897, 5068, 5994, 12380, 940,\n 10247, 
14645, 6990, 12258, 11871, 4469, 7731, 1110, 14238,\n 10295, 6253, 1048, 12820, 8324, 35, 13493, 12395, 5959,\n 1558, 14939, 8402, 12024, 3450, 11996, 10930, 3641, 4914,\n 8503, 5824, 8897, 11582, 10699, 6011, 3073, 12185, 7870,\n 6315, 12726, 9771, 7401, 8806, 4712, 3120, 11854, 5942,\n 3177, 1621, 6075, 15402, 193, 11221, 15288, 14061, 10770,\n 6163, 10201, 5571, 1804, 11823, 10301, 14526, 6766, 12448,\n 10900, 4751, 7886, 2232, 14303, 256, 4183, 6616, 4392,\n 12291, 10092, 2506, 2555, 2064, 8354, 14153, 4525, 12266,\n 7534, 2417, 11547, 5581, 11899, 14054, 8981, 11858, 4926,\n 5635, 8701, 5583, 10130, 7316, 11072, 12827, 7843, 131,\n 13199, 2175, 1853, 3965, 3417, 9234, 5435, 6439, 10271,\n 11894, 13712, 12482, 1120, 2837, 14127, 1452, 1786, 9158,\n 15341, 5003, 15095, 8206, 14740, 12443, 7576, 5722, 14411,\n 14239, 5626, 15046, 2195, 8120, 9101, 3988, 5416, 2771,\n 5874, 4694, 7540, 9608, 11274]), 'C': array([ 3373, 2036, 10207, 8997, 9128, 8878, 11481, 3312, 8534,\n 2966, 7047, 6228, 13156, 724, 7981, 5358, 170, 2726,\n 65, 8418, 10635, 8970, 13374, 9347, 4943, 5057, 5337,\n 2116, 2234, 3051, 7840, 5086, 10733, 4650, 9734, 11933,\n 12321, 10337, 11348, 5300, 9627, 10916, 6519, 1535, 9771,\n 3975, 7498, 13285, 9386, 11128, 2969, 12632, 1931, 2883,\n 12477, 12042, 5, 5983, 1608, 6702, 2110, 4930, 5297,\n 11936, 404, 5483, 11341, 11079, 7712, 4288, 3723, 8491,\n 857, 2624, 13314, 9273, 12338, 6338, 4944, 4076, 11357,\n 2068, 13096, 12894, 4922, 6714, 4858, 2765, 1233, 10860,\n 11356, 6047, 4846, 9251, 6314, 4702, 13061, 12023, 8303,\n 10697, 1153, 11390, 12155, 12292, 12327, 9088, 9110, 7888,\n 8789, 6855, 8099, 12364, 10405, 13044, 11645, 5717, 410,\n 9138, 4107, 10034, 888, 10994, 11785, 2606, 1660, 207,\n 10108, 12120, 1992, 9221, 12279, 3403, 5405, 2375, 5363,\n 2681, 2832, 1130, 10166, 7500, 10813, 6064, 2262, 9144,\n 6184, 2066, 3932, 12024, 8996, 3397, 4604, 8732, 6434,\n 842, 4393, 7257, 239, 8661, 11140, 8628, 11729, 1810,\n 8836, 147, 6689, 8799, 218, 5764, 11598, 104, 3416,\n 9408, 5597, 9650, 3085, 5637, 12917, 3722, 1047, 3672,\n 5539, 6347, 3689, 10574, 9307, 5183, 2028, 1020, 7307,\n 7258, 11273, 2666, 3748, 337, 3587, 2686, 3112, 7116,\n 8502, 6827, 10129, 12745, 4064, 6457, 5223, 10286, 10934,\n 2923, 4496, 9377, 13261, 4174, 4519, 10753, 6983, 13373,\n 7310, 12429, 2661, 6743, 1097, 9925, 8755, 7360, 1392,\n 7638, 3726, 11650, 6570, 8218, 10911, 8385, 1296, 4912,\n 7524, 4101, 12843, 10775, 6857, 11084, 777, 10556, 9644,\n 219, 3241, 7566, 1589, 7080, 11964, 13272, 12821, 7773,\n 11145, 9025, 11400, 211, 11708, 4436, 12529, 10390, 6754,\n 10834, 9687, 9201, 12453, 7876, 12536, 11070, 11616, 9924,\n 3791, 844, 7985, 1332, 13380, 11134, 12273, 11163, 5851,\n 3211, 3251, 9451, 5580, 11311, 3876, 1483, 9745, 2491,\n 10722, 11611, 11339, 4136, 3653, 6883, 10504, 12189, 7523,\n 6734, 1263, 2935, 4875, 4012, 3611, 2257, 3378, 4381,\n 12257, 5212, 6720, 772, 5856, 10366, 5880, 3759, 10610,\n 3998, 3755, 2506, 4805, 590, 4619, 2122, 10749, 1319,\n 7838, 4065, 7652, 4826, 886, 8846, 12674, 7630, 5481,\n 2277, 1090, 11235, 10181, 1438, 3504, 3471, 5174, 2920,\n 835, 11191, 4975, 4094, 364, 12295, 12819, 254, 4308,\n 593, 9001, 8065, 1432, 2463, 9345, 40, 7329, 2770,\n 4433, 1436, 1185, 9170, 10952, 12116, 6589, 1048, 5186,\n 11619, 11454, 10392, 13182, 2490, 12316, 11075, 8654, 4111,\n 2915, 11954, 6461, 5755, 2608, 3682, 1314, 5234, 10145,\n 6387, 5187, 6769, 5242, 6298, 478, 8657, 639, 2159,\n 8806, 5705, 7664, 761, 10517, 11748, 12834, 611, 3276,\n 11312, 688, 7088, 8687, 4443, 
5097, 12125, 7767, 12877,\n 10424, 10541, 6558, 5532, 8404, 11977, 7906, 10328, 10943,\n 12648, 5628, 12697, 10985, 5492, 2782, 7299, 2947, 13145,\n 4278, 11462, 8508, 12761, 1647, 2588, 6931, 8264, 5030,\n 10019, 10900, 289, 7067, 3999, 12537, 11412, 10696, 11363,\n 4675, 6864, 13196, 3955, 3704, 183, 10318, 7809, 2986,\n 4175, 8309, 10261, 7232, 5694, 10690, 3309, 7776, 12403,\n 94, 5230, 11307, 868, 6691, 2361, 11646, 4881, 6490,\n 12768, 9359, 6939, 2332, 3268, 10651, 5647, 6745, 6680,\n 12997, 12075, 6196, 9879, 10845, 9278, 2717, 12239, 2208,\n 3052, 4685, 10038, 11623, 299, 11772, 12961, 5207, 9404,\n 551, 5035, 11487, 7236, 4343, 11941, 1790, 6978, 2994,\n 12210, 616, 11123, 9165, 4261, 5668, 7296, 11570, 6988,\n 8845, 10243, 12775, 152, 3041, 8543, 286, 4506, 2292,\n 7231, 2565, 9678, 3945, 10260, 2097, 3980, 1010, 12444,\n 1188, 5407, 4394, 12040, 12379, 12447, 11648, 7113, 4896,\n 12711, 7041, 13339, 703, 9315, 10365, 7154, 12594, 3631,\n 9989, 11474, 899, 614, 4259, 1675, 10636, 5792, 12016,\n 11846, 8134, 458, 5422, 1863, 2639, 2044, 12315, 1623,\n 6239, 5991, 5425, 2296, 3511, 871, 8597, 6590, 5397,\n 2690, 8713, 1858, 4075, 10817, 2402, 4716, 5840, 5104,\n 7346, 2675, 8104, 809, 12525, 7378, 4555, 2707, 2354,\n 3083, 2687, 4355, 3125, 10506, 4117, 4362, 11415, 13146,\n 3133, 1813, 9824, 6812, 6548, 11557, 7159, 1237, 3427,\n 8825, 13259, 9707, 6427, 1273, 5497, 13049, 12936, 7456,\n 9491, 12410, 10289, 6073, 8055, 10809, 1126, 874, 11658,\n 11736, 2403, 4856, 6165, 9692, 1440, 12947, 2660, 5659,\n 12608, 12704, 11130, 7281, 12443, 11745, 11521, 2145, 13081,\n 1229, 12332, 11361, 8517, 8722, 2715, 4972, 4803, 3623,\n 193, 6970, 6916, 2030, 4197, 3113, 9950, 13087, 7211,\n 565, 3117, 11486, 7677, 10841, 9743, 4133, 8919, 12455,\n 13332, 10042, 9760, 2356, 5864, 11428, 3470, 222, 1842,\n 3487, 11473, 1886, 10270, 6578, 12688, 1771, 3518, 6066,\n 9931, 12859, 3067, 6459, 7079, 10946, 7610, 13269, 11194,\n 2985, 12375, 9188, 6324, 9193, 523, 12244, 5015, 9346,\n 11725, 4348, 3690, 12171, 3902, 6374, 13310, 11613, 3592,\n 8154, 4615, 7518, 5369, 4031, 12348, 7322, 3157, 7936,\n 10525, 12679, 8577, 9331, 6187, 10671, 2881, 4244, 6555,\n 8527, 664, 5924, 9534, 684, 3846, 8965, 11062, 8324,\n 7199, 11396, 7109, 9341, 10738, 9713, 6449, 12868, 11352,\n 3003, 7455, 10520, 10837, 5200, 1754, 12503, 4771, 7916,\n 1367, 12113, 579, 133, 2980, 9351, 12435, 12758, 2131,\n 12875, 13308, 1116, 1699, 7977, 11475, 10277, 1164, 7160,\n 5820, 8426, 1745, 12439, 11208, 1891, 7648, 10630, 1636,\n 6111, 8989, 1919, 13095, 1562, 8525, 1618, 8600, 5456,\n 6255, 12772, 12675, 10063, 8913, 848, 7501, 7547, 10798,\n 1137, 1741, 10885, 4125, 12302, 8645, 6438, 7597, 3467,\n 1077, 12538, 11820, 10107, 10816, 169, 8842, 9970, 6132,\n 3425, 4239, 9755, 9963, 9104, 7875, 2933, 1941, 12614,\n 4014, 7554, 12683, 11869, 468, 321, 895, 12317, 13183,\n 6318, 2777, 3036, 3839, 2400, 7717, 12066, 2829, 2207,\n 6774, 9223, 11092, 13241, 11652, 12402, 1591, 10275, 5812,\n 11280, 12299, 1642, 12087, 1442, 7910, 1042, 5355, 9209,\n 10689, 4204, 10499, 2046, 13168, 662, 4285, 2551, 226,\n 3213, 9582, 6635, 12698, 10575, 4083, 11854, 276, 450,\n 10054, 6258, 12539, 3161, 1144, 266, 1465, 6035, 2165,\n 12605, 2222, 3480, 354, 5074, 5277, 11402, 9127, 8972,\n 9072, 68, 12184, 6306, 4334, 1808, 12631, 7807, 6495,\n 13045, 7823, 82, 11929, 6837, 7074, 12707, 6982, 1276,\n 1011, 9214, 12395, 6705, 5611, 7966, 2625, 7707, 5125,\n 9619, 9502, 10191, 12763, 12014, 9832, 9584, 6934, 172,\n 9253, 11360, 6206, 9693, 1183, 9087, 
4751, 11901, 1124,\n 4960, 8171, 191, 8570, 8945, 3754, 2135, 11910, 3449,\n 5193, 10805, 1860, 589, 7883, 9233, 12057, 3944, 265,\n 3314, 2501, 7574, 1017, 5380, 502, 23, 5825, 12751,\n 8114, 7263, 8522, 8408, 5311, 8484, 6325, 558, 3671,\n 5459, 2264, 7304, 9325, 10822, 3095, 8704, 5786, 1533,\n 1056, 3901, 13275, 3976, 10735, 1922, 6847, 3060, 10709,\n 541, 1960, 1902, 12111, 6362, 10364, 8221, 10265, 4410,\n 12520, 8473, 953, 1857, 8407, 3994, 5054, 8470, 6115,\n 11577, 10381, 4359, 11063, 12401, 1722, 10796, 1063, 1052,\n 2692, 9703, 8103, 9393, 5771, 8339, 3388, 3453, 10211,\n 8429, 11596, 1761, 11890, 12488, 24, 6647, 5157, 4138,\n 3262, 5622, 7619, 1782, 5310, 9016, 6496, 13282, 1820,\n 11182, 9988, 163, 8164, 11215, 9312, 966, 8675, 1929,\n 1066, 3741, 1945, 5367, 877, 7669, 5565, 9943, 1418,\n 5936, 278, 519, 7546, 5724, 1108, 3566, 90, 7153,\n 8620, 3441, 11751, 5010, 6058, 7459, 12035, 6995, 7291,\n 12238, 4219, 10206, 4911, 10727, 5635, 3001, 10996, 10100,\n 8575, 4515, 12004, 6396, 13398, 2231, 1007, 12756, 8477,\n 12731, 10550, 11706, 8764, 10634, 9938, 1113, 10489, 3942,\n 4850, 10638, 376, 3603, 122, 11036, 2002, 5513, 6485,\n 12433, 9873, 3582, 11816, 3275, 7169, 8829, 11519, 3892,\n 8768, 8172, 5475, 12484, 9757, 10919, 10451, 3093, 8556,\n 12421, 9212, 5444, 12056, 10868, 3061, 1511, 2417, 3604,\n 4216, 5119, 4593, 7174, 5811, 11615, 2188, 4915, 6422,\n 12987, 1806, 2941, 2071, 3632, 4322, 4670, 6175, 7166,\n 2273, 7176, 1816, 6466, 2293, 8519, 7986, 3575, 9677,\n 1306, 7615, 13093, 11417, 9481, 440, 2281, 6941, 569,\n 8495, 5629, 7366, 7853, 13189, 3143, 1165, 3784, 6516,\n 12065, 11198, 5740, 11204, 8084, 1501, 661, 86, 7326,\n 13126, 11452, 5822, 7604, 245, 11522, 3164, 12052, 6836,\n 5248, 2392, 5255, 12870, 4622, 12583, 2386, 12134, 8606,\n 10497, 8329, 4840, 11135, 462, 11186, 10769, 12164, 13329,\n 2557, 2699, 13231, 4973, 161, 413, 4150, 8312, 8330,\n 7533, 5176, 12913, 13294, 6401, 9917, 9960, 12689, 8431,\n 4808, 2570, 12077, 5590, 9166, 10905, 12368, 6454, 8039,\n 13236, 8509, 1018, 1280, 12197, 12363, 7999, 12784, 5928,\n 9905, 64, 7772, 4050, 2983, 5192, 3501, 4452, 6129,\n 12795, 10237, 3968, 5229, 6326, 13377, 2085, 1463, 2634,\n 12856, 8690, 1639, 5484, 1723, 2879, 6205, 7111, 1909,\n 3914, 8189, 5011, 479, 1613, 11867, 7640, 11416, 2010,\n 2393, 4168, 6312, 1492, 12117, 7577, 552, 12718, 3820,\n 11411, 890, 3529, 10625, 12642, 475, 632, 10128, 6380,\n 5733, 9116, 6282, 8366, 3046, 5099, 8513, 13185, 10599,\n 12489, 5083, 2171, 6910, 1138, 2934, 6394, 2708, 7528,\n 436, 12941, 3862, 10446, 195, 7341, 10291, 3933, 7282,\n 10282, 12572, 7039, 2626, 13325, 379, 11365, 6911, 8894,\n 13330, 4871, 5909, 9381, 8028, 9680, 10406, 10185, 6122,\n 656, 2318, 2891, 4270, 10144, 4894, 3461, 1956, 3845,\n 9606, 455, 9900, 3409, 5447, 5439, 2227, 5328, 3679,\n 12607, 12482, 9686, 12928, 2908, 2048, 9256, 1635, 5449,\n 12681, 3761, 11157, 6563, 8726, 12743, 5675, 7908, 8138,\n 6604, 3886, 10941, 4426, 2423, 2229, 9778, 12547, 4209,\n 8649, 12013, 11345, 12426, 6659, 3922, 230, 11705, 3098,\n 10213, 2958, 3608, 5719, 6511, 4745, 9320, 3383, 5944,\n 11384, 11444, 12797, 10744, 13092, 8886, 9566, 7435, 5061,\n 8969, 9944, 8369, 10415, 11459, 1772, 5796, 11872, 5362,\n 2849, 10668, 9828, 7862, 4836, 11369, 718, 13213, 1000,\n 3499, 11897, 9062, 12560, 1473, 5426, 9413, 10189, 110,\n 4267, 3343, 5657, 2991, 13017, 6719, 10768, 1862, 305,\n 4726, 7563, 6190, 10452, 3246, 1760, 9538, 374, 10248,\n 12515, 3270, 11790, 11271, 12070, 95, 7495, 10434, 11533,\n 9044, 5485, 8573, 
8344, 87, 1512, 8680, 2555, 8931,\n 7272, 415, 11585, 6526, 7082, 4812, 13019, 171, 10540,\n 7754, 2163, 6514, 3399, 4425, 13192, 8583, 496, 816,\n 4052, 4217, 7482, 6897, 10460, 12866, 4492, 4748, 13229,\n 9861, 5332, 3614, 1179, 1363, 3286, 2943, 13110, 11278,\n 7251, 12107, 12876, 5253, 13014, 6484, 11701, 7512, 11176,\n 6573, 4759, 8665, 3502, 4631, 2349, 7131, 1333, 6756,\n 1765, 2258, 5002, 5975, 6455, 3764, 2380, 5205, 12590,\n 2542, 9403, 6997, 370, 9965, 4385, 3371, 2776, 12425,\n 8880, 7241, 4693, 7336, 5279, 6528, 9489, 8239, 4880,\n 773, 5612, 9689, 5256, 3899, 7244, 3059, 11261, 11302,\n 12762, 3649, 10163, 3192, 5862, 6183, 12701, 12497, 3019,\n 3729, 1285, 2267, 7092, 11281, 3108, 4289, 8356, 7857,\n 12901, 13155, 10587, 7084, 5317, 11032, 6575, 1611, 13222,\n 10419, 8914, 7187, 7787, 3412, 9152, 6388, 4757, 10273,\n 3242, 3579, 8006, 9284, 8447, 5387, 9906, 4042, 3840,\n 7005, 2926, 7076, 10612, 1825, 8864, 2645, 12266, 11956,\n 10339, 10438, 7973, 9866, 4258, 533, 13018, 2251, 817,\n 1476, 4834, 12146, 156, 3948, 11993, 10936, 8133, 10454,\n 2328, 11420, 11229, 745, 5068, 4601, 10899, 6167, 9309,\n 7127, 9372, 5678, 42, 7210, 6339, 2289, 4870, 2533,\n 5945, 11133, 7757, 10453, 4511, 5700, 10658, 3418, 5848,\n 11438, 12703, 11924, 4241, 1598, 3279, 2719, 6375, 9374,\n 12524, 740, 5334, 5977, 8320, 4481, 2515, 5126, 11881,\n 9616, 6681, 6600, 13144, 9098, 5526, 8029, 8030, 4106,\n 6173, 9416, 196, 3094, 8817, 9261, 11022, 5383, 9358,\n 6602, 11008, 787, 9270, 1688, 12133, 4988, 12206, 10884,\n 247, 400, 9844, 11871, 6414, 7293, 6322, 1167, 11721,\n 4514, 12137, 11266, 8469, 5847, 7856, 7858, 4985, 8544,\n 7172, 12982, 6274, 12059, 6369, 10188, 11898, 2861, 2572,\n 4079, 5775, 11196, 9499, 13012, 7871, 7845, 4549, 6371,\n 10165, 5574, 7483, 7508, 10741, 5093, 9395, 9791, 5592,\n 7634, 2408, 2706, 5159, 8681, 11787, 4502, 5185, 4084,\n 298, 850, 2434, 3369, 1875, 4999, 11833, 11675, 6061,\n 3969, 8863, 2006, 6683, 9624, 8744, 1508, 834, 4302,\n 9060, 6081, 8899, 3843, 13320, 3202, 8707, 8008, 132,\n 8792, 956, 2219, 9683, 9005, 10332, 4558, 8294, 11817,\n 13263, 1696, 11683, 9301, 4644, 5640, 9829, 2464, 5546,\n 1993, 513, 4758, 7530, 2308, 5191, 7651, 12358, 2086,\n 3500, 9985, 10886, 12419, 8021, 12992, 10995, 990, 4255,\n 3135, 8277, 7305, 12557, 8064, 2504, 9927, 11502, 1043,\n 12026, 9487, 9363, 1490, 12307, 6913, 9821, 1545, 7587,\n 6556, 1120, 3934, 1397, 9167, 1538, 12921, 6649, 250,\n 4635, 3734, 9625, 11097, 2460, 51, 698, 10359, 6568,\n 13122, 5235, 9137, 8771, 123, 3354, 11344, 5742, 3609,\n 10795, 4341, 6577, 7797, 11091, 2848, 10754, 1064, 1763,\n 12848, 10353, 1987, 9438, 10313, 182, 2814, 444, 13265,\n 5381, 11584, 5761, 5455, 3864, 9496, 6677, 10770, 1408,\n 9490, 12603, 8348, 8668, 4623, 6381, 9268, 4605, 11407,\n 13204, 10688, 8160, 6133, 5583, 7540, 11189, 401, 8206,\n 10501, 448, 7848, 12694, 7526, 2195, 1903, 12007, 1173,\n 7600, 7609, 9523, 11676, 3249, 10702, 13293, 2663, 7805,\n 2685, 11633, 13151, 9672, 2955, 8512, 3137, 9239, 8571,\n 5918, 11147, 7971, 6247, 4243, 12422, 7415, 11889, 10420,\n 7770, 8217, 11103, 11826, 6120, 4098, 403, 6372, 2897,\n 6950, 9995, 4253, 4589, 3628, 8888, 6072, 9529, 13080,\n 8380, 6727, 2209, 638, 6723, 3688, 8298, 1245, 3920,\n 3433, 10060, 4730, 12230, 8794, 12508, 1282, 8679, 2297,\n 4211, 6874, 10001, 1107, 11735, 6814, 5301, 11907, 9545,\n 983, 10731, 6023, 7001, 11844, 4961, 10030, 136, 3713,\n 8903, 5627, 801, 6476, 2050, 326, 10620, 602, 8676,\n 3346, 8925, 10253, 10677, 9131, 4490, 5431, 7442, 2335,\n 12746, 
6917, 8555, 1445, 1513, 7398, 12789, 8018, 5641,\n 4177, 10140, 13117, 4855, 6816, 4838, 10944, 3423, 7085,\n 3366, 1666, 6087, 366, 11427, 11987, 6650, 11975, 2482,\n 4188, 9539, 5660, 11828, 8313, 11884, 78, 6141, 7203,\n 9123, 13312, 10607, 3334, 10757, 1177, 11694, 6841, 6616,\n 11274, 6475, 5164, 9288, 1940, 9294, 7702, 1141, 150,\n 1209, 9636, 2098, 8851, 9843, 5650, 8240, 198, 2385,\n 10319, 4563, 654, 7947, 7489, 865, 8466, 258, 7015,\n 5646, 12427, 3329, 2657, 13179, 4739, 3737, 2617, 13002,\n 10155, 3071, 4612, 9252, 10246, 1301, 6397, 764, 10263,\n 11450, 10815, 4532, 5568, 7265, 934, 4580, 339, 8548,\n 10827, 618, 8337, 11458, 7567, 11574, 12954, 1096, 11717,\n 3893, 9587, 11040, 2211, 8520, 561, 12986, 6134, 1974,\n 12890, 8410, 8994, 12248, 12558, 6822, 8345, 4637, 5827,\n 3402, 10937, 10115, 9099, 6062, 4929, 4992, 10109, 597,\n 3960, 6482, 2140, 4295, 5589, 560, 8589, 5094, 7309,\n 4786, 9036, 7628, 2932, 4190, 8132, 12804, 11843, 10585,\n 2602, 2735, 11778, 8901, 9225, 7905, 4633, 1784, 3867,\n 4547, 528, 3900, 7790, 12213, 10877, 3380, 6423, 5178,\n 8974, 6164, 13212, 3172, 12135, 12944, 8368, 4373, 11193,\n 2878, 4910, 5810, 526, 6048, 12334, 9162, 1055, 2616,\n 9618, 1596, 4672, 9611, 11253, 12518, 3841, 2667, 12036,\n 6074, 6004, 1874, 3042, 2641, 12020, 12512, 3687, 11865,\n 6762, 6502, 175, 6696, 723, 3237, 11096, 2111, 9077,\n 11178, 10153, 9654, 1494, 7054, 1036, 8123, 762, 6771,\n 3261, 7957, 10800, 9694, 13029, 1961, 9314, 1347, 2369,\n 5281, 4903, 8416, 8027, 2644, 9696, 11571, 8083, 4713,\n 5872, 8384, 9241, 2130, 7024, 1172, 8731, 11220, 3979,\n 12938, 3551, 11798, 563, 7997, 5180, 1848, 12735, 11766,\n 2164, 2102, 7233, 4286, 8127, 11296, 10374, 7384, 7687,\n 1879, 4529, 9353, 1845, 4158, 2252, 7955, 9602, 3753,\n 12182, 2671, 3849, 6758, 5595, 12408, 4018, 12351, 3018,\n 8175, 9864, 12148, 13323, 406, 13147, 7920, 2152, 7150,\n 1978, 1346, 12218, 7216, 3047, 5354, 4118, 7721, 1234,\n 4458, 838, 3697, 2127, 12853, 7723, 5384, 12404, 9599,\n 5436, 706, 5985, 1624, 2429, 5829, 11376, 13317, 4543,\n 10322, 8267, 10103, 8286, 6335, 11112, 7582, 7737, 3199,\n 11564, 4349, 11637, 3895, 3984, 2964, 11265, 4364, 2176,\n 13114, 10259, 4430, 3389, 12154, 5194, 9876, 2906, 6529,\n 11967, 5923, 7193, 4340, 9548, 9731, 12571, 8161, 12943,\n 1218, 3635, 8500, 6091, 8462, 2828, 12225, 714, 10336,\n 3284, 1051, 7204, 5982, 12424, 5952, 9073, 5517, 2241,\n 8334, 13123, 11997, 9265, 6419, 5145, 749, 608, 2210,\n 10654, 3260, 7247, 11728, 4991, 3860, 8529, 10825, 11409,\n 12599, 1918, 4465, 6669, 7520, 3009, 8501, 4021, 8758,\n 5785, 7692, 11836, 2545, 11992, 8793, 7766, 2372, 5680,\n 8296, 10240, 2121, 2971, 4744, 5573, 4618, 1277, 1222,\n 3918, 13392, 4306, 3880, 6377, 8942, 2305, 4789, 12672,\n 1975, 8897, 1787, 8333, 4827, 5511, 10912, 7676, 8051,\n 2721, 4671, 12430, 11614, 8699, 8790, 4813, 3983, 1119,\n 6675, 153, 1738, 10829, 7194, 8862, 7653, 5692, 7639,\n 9802, 2742, 921, 9869, 10306, 11687, 3024, 5972, 11866,\n 4560, 10597, 2070, 8188, 10502, 6692, 199, 11789, 1836,\n 9976, 11626, 4718, 12923, 1389, 6083, 12457, 1287, 2136,\n 11994, 11132, 12166, 3701, 13186, 6400, 7655, 4886, 11368,\n 9237, 12739, 3829, 7752, 8701, 4100, 13184, 12646, 7683,\n 8475, 12881, 4324, 3341, 6460, 925, 5691, 11330, 13078,\n 6512, 12619, 6474, 1281, 10765, 5797, 6704, 1415, 8315,\n 10033, 12413, 116, 10050, 12095, 849, 11057, 2670, 5754,\n 11762, 10401, 5467, 1424, 2113, 8824, 1873, 4408, 2753,\n 3468, 10303, 2324, 7209, 3823, 8411, 13391, 10561, 3413,\n 10444, 8182, 5502, 1083, 5163, 10892, 
3837, 4369, 5981,\n 11542, 6636, 10564, 6967, 6601, 4281, 7095, 13279, 2314,\n 10704, 10067, 11202, 9424, 3852, 11888, 4316, 1232, 7108,\n 7602, 7380, 6977, 9651, 8059, 19, 4941, 471, 6606,\n 2931, 4429, 3991, 10665, 9737, 1563, 4669, 7730, 8977,\n 9263, 7817, 2525, 920, 12835, 10276, 7913, 11845, 5258,\n 11718, 6424, 2727, 3408, 434, 8183, 722, 12567, 530,\n 2469, 9914, 4663, 1254, 6056, 11031, 5090, 9855, 12331,\n 4003, 12253, 1390, 665, 13000, 8201, 5766, 7978, 9549,\n 4733, 12, 12685, 4860, 7222, 2027, 8952, 8896, 10603,\n 6166, 4401, 1537, 8409, 6301, 3621, 7356, 8820, 12119,\n 10971, 9059, 27, 3176, 61, 8733, 3599, 3913, 6572,\n 12764, 3460, 7070, 1009, 5322, 1221, 12044, 9834, 8046,\n 6283, 3048, 12565, 1804, 5516, 6334, 10684, 696, 4310,\n 5875, 9531, 2115, 5423, 8403, 2967, 6332, 11305, 6722,\n 5515, 12900, 12450, 8609, 9556, 1703, 11152, 9011, 1162,\n 6202, 11818, 1024, 6030, 4198, 8659, 3731, 2415, 8077,\n 7915, 5529, 5743, 11580, 2780, 5476, 4778, 2759, 3236,\n 12610, 12256, 12792, 6300, 6055, 173, 11986, 7156, 11030,\n 9014, 10343, 8207, 13115, 13300, 1468, 9182, 6480, 11168,\n 8646, 9871, 6540, 4484, 13022, 3456, 7182, 572, 4688,\n 686, 11098, 5747, 1605, 5351, 442, 1876, 3919, 11044,\n 2913, 6001, 1430, 833, 556, 2662, 13306, 8387, 282,\n 2389, 142, 7023, 7369, 8035, 4470, 1176, 1510, 7895,\n 8414, 5776, 3521, 5359, 10048, 2852, 8016, 2473, 10746,\n 2020, 10345, 940, 456, 9848, 10137, 8834, 5531, 6506,\n 10648, 1472, 8047, 6729, 4378, 1326, 10914, 10204, 1329,\n 12827, 13291, 6660, 9092, 12058, 1379, 5026, 12251, 2778,\n 7297, 4389, 10254, 5684, 3795, 5043, 6221, 12012, 11172,\n 10720, 1068, 3103, 1955, 8000, 1467, 11354, 8341, 5339,\n 1773, 332, 8465, 12228, 10989, 11919, 9344, 7098, 11925,\n 8957, 11665, 6773, 1554, 4279, 9061, 7349, 7094, 7065,\n 7107, 3854, 8782, 11064, 9609, 10710, 5400, 7522, 831,\n 12028, 9486, 11308, 3732, 7344, 12597, 12530, 9126, 7198,\n 3394, 5566, 4494, 9849, 9280, 2383, 11779, 12388, 8377,\n 7952, 11257, 4982, 9579, 11998, 4676, 4876, 10003, 2808,\n 7689, 1716, 681, 5845, 3224, 1612, 7008, 8249, 13077,\n 5720, 8671, 1507, 7475, 1707, 6479, 1946, 3742, 9485,\n 10683, 10471, 457, 9135, 11696, 11873, 9157, 4916, 9323,\n 5213, 12308, 5320, 9675, 6523, 2838, 7949, 3324, 1990,\n 566, 12362, 9714, 13035, 7804, 2005, 8702, 7055, 277,\n 2984, 1139, 10378, 4311, 12360, 10006, 819, 571, 7926,\n 3806, 12326, 5922, 10893, 1354, 2456, 6793, 12446, 3089,\n 268, 4444, 1128, 4404, 10756, 2107, 4049, 12950, 775,\n 4655, 6252, 870, 7794, 5331, 6858, 9032, 1546, 1968,\n 6821, 5298, 4336, 8343, 6103, 8530, 8537, 7551, 8476,\n 194, 10721, 9565, 12322, 1015, 12589, 3989, 6195, 12169,\n 8724, 3384, 7393, 9151, 10383, 5169, 83, 1766, 1592,\n 1815, 12201, 2734, 6292, 12728, 5222, 8436, 770, 8788,\n 8611, 356, 744, 7594, 5974, 7796, 7746, 12931, 5808,\n 3175, 12626, 5000, 2910, 9021, 6893, 10874, 4551, 5111,\n 9457, 10547, 5262, 8192, 4714, 12156, 12548, 6244, 741,\n 5582, 4501, 6100, 12559, 4030, 2032, 11076, 8567, 10546,\n 12623, 3248, 6728, 8091, 1252, 4592, 2480, 4202, 6684,\n 13225, 9029, 2643, 2576, 4040, 7902, 10773, 9631, 2261,\n 768, 4660, 7034, 9710, 9563, 1731, 492, 5556, 6757,\n 7250, 7240, 8311, 10863, 10659, 568, 1044, 3935, 4001,\n 3158, 9264, 63, 8347, 8096, 12187, 11565, 12898, 7249,\n 10871, 11375, 3294, 5261, 1603, 10836, 6391, 12869, 1531,\n 7467, 3763, 1493, 10611, 10788, 10901, 2927, 1496, 4649,\n 991, 12765, 10694, 8539, 5973, 5962, 11647, 1013, 1499,\n 6295, 1228, 750, 3337, 3149, 4483, 10932, 7769, 7894,\n 9661, 12173, 11563, 6425, 11605, 2062, 
7591, 7815, 1958,\n 2596, 5506, 11294, 8349, 3124, 6024, 10581, 9962, 6622,\n 7100, 2678, 7295, 4821, 3678, 1388, 6370, 12830, 7548,\n 7474, 8955, 192, 2705, 6110, 1901, 9912, 6559, 11381,\n 10072, 10894, 1565, 11862, 7800, 4763, 12677, 8117, 605,\n 11461, 7183, 952, 852, 960, 12387, 3661, 939, 2187,\n 943, 3711, 223, 10621, 11651, 13376, 343, 1849, 9955,\n 4055, 6642, 4686, 11531, 3090, 9468, 12132, 5156, 3446,\n 11950, 10244, 673, 7731, 4508, 1582, 8293, 2108, 1924,\n 5038, 6682, 10098, 2729, 9354, 1204, 5594, 8976, 4775,\n 10112, 367, 8266, 8074, 3379, 10752, 10888, 12068, 59,\n 898, 5330, 10065, 5188, 18, 7671, 12581, 11126, 6012,\n 6395, 7313, 3638, 8856, 11802, 7286, 13334, 6426, 4773,\n 12311, 2004, 5046, 7820, 5458, 8944, 2223, 1265, 1701,\n 5599, 1727, 2205, 6076, 2244, 7042, 3848, 7890, 10803,\n 2976, 52, 7385, 12349, 11581, 6330, 26, 9456, 10767,\n 3598, 8872, 12649, 7416, 11879, 6583, 5706, 10724, 4734,\n 6085, 2306, 4842, 13382, 5072, 6215, 13162, 12449, 4471,\n 11367, 13023, 2800, 1899, 2953, 13054, 9276, 8540, 2014,\n 12049, 10409, 3162, 7102, 1877, 9446, 1298, 9997, 9878,\n 7691, 8009, 11700, 3445, 7253, 9756, 3194, 5744, 5179,\n 906, 12081, 1972, 6467, 11827, 12873, 11, 4977, 8826,\n 1746, 8219, 1724, 9428, 1558, 5831, 2470, 647, 6582,\n 6887, 12637, 7521, 8688, 9063, 5346, 11041, 2831, 11677,\n 4128, 11512, 1262, 12357, 3069, 6840, 12550, 11822, 89,\n 7190, 13326, 10457, 11336, 6871, 5901, 1300, 8162, 894,\n 5586, 9002, 5162, 8622, 3190, 5980, 10669, 4801, 11640,\n 5294, 11349, 7161, 1679, 1002, 5999, 2745, 7603, 7312,\n 12150, 600, 8653, 4643, 10154, 5021, 422, 1700, 3804,\n 4969, 2284, 14, 4046, 11334, 466, 12647, 5005, 6293,\n 9089, 130, 5209, 5949, 4013, 8939, 3491, 1999, 8507,\n 8765, 2755, 10633, 1542, 974, 2198, 106, 9888, 6309,\n 881, 553, 8918, 1655, 11153, 10375, 2851, 8503, 811,\n 10465, 11081, 11501, 2337, 11379, 9786, 5572, 3465, 9933,\n 2129, 707, 2376, 13385, 111, 7990, 4793, 29, 6148,\n 2948, 11602, 5690, 438, 10519, 630, 4606, 1520, 8307,\n 1939, 10938, 9561, 2173, 387, 2443, 10927, 10479, 12509,\n 2882, 9726, 216, 2764, 6098, 6435, 9057, 8696, 10977,\n 8778, 5498, 7275, 5075, 11783, 9300, 1529, 8093, 5609,\n 7623, 8920, 8232, 3325, 8177, 3546, 12854, 10951, 4848,\n 12434, 12157, 6938, 7724, 7964, 2544, 13270, 8708, 8259,\n 9380, 7432, 10426, 1062, 13099, 3311, 10758, 11451, 4607,\n 9781, 7517, 11055, 11143, 9655, 11105, 12053, 12268, 2989,\n 267, 11483, 7374, 10272, 5843, 12678, 2805, 8301, 6106,\n 236, 6508, 270, 2426, 4576, 4283, 10766, 3303, 598,\n 3496, 4957, 6554, 4104, 8460, 2493, 11991, 3307, 10715,\n 7941, 7179, 11662, 4647, 12203, 9472, 10577, 12276, 9257,\n 4116, 7904, 601, 9435, 4390, 12734, 5618, 10842, 5995,\n 837, 10412, 9009, 9727, 1416, 11122, 6045, 5888, 13048,\n 4019, 11813, 12832, 7785, 4120, 1683, 6544, 12469, 5559,\n 12598, 4816, 5626, 2889, 12891, 35, 2979, 7516, 9700,\n 2763, 10801, 10675, 11477, 778, 7868, 5710, 10968, 12671,\n 12471, 8108, 3097, 5588, 5488, 7320, 8629, 3707, 12485,\n 1081, 3131, 11125, 10516, 1297, 635, 4939, 7742, 8753,\n 12355, 13287, 7395, 8424, 8467, 11506, 13381, 2467, 2498,\n 7682, 39, 3634, 300, 6965, 6095, 3992, 8453, 3357,\n 6603, 7581, 5374, 5486, 7571, 1395, 8031, 1349, 7448,\n 4319, 10662, 1896, 8582, 10571, 8211, 2212, 10949, 5081,\n 11635, 10778, 12112, 10195, 12185, 12611, 1630, 1811, 13181,\n 1417, 5088, 12312, 7144, 5772, 1997, 9387, 4918, 8063,\n 3082, 4777, 12522, 12183, 1103, 9269, 10298, 867, 316,\n 9083, 1074, 7951, 5302, 2970, 11337, 984, 5171, 2668,\n 5443, 2410, 12376, 1339, 7315, 10643, 
7223, 4701, 11671,\n 447, 3733, 2331, 8220, 4914, 9524, 5265, 4788, 7480,\n 3517, 1588, 4628, 8246, 9807, 13198, 5518, 5569, 1785,\n 4712, 11682, 8406, 453, 8262, 8043, 9659, 7364, 2475,\n 9465, 4451, 9473, 3269, 5341, 10094, 12737, 1739, 6610,\n 3483, 861, 5867, 8841, 10538, 3170, 3513, 4806, 2767,\n 6291, 6109, 11653, 3777, 629, 9705, 2197, 4581, 7370,\n 8463, 4493, 11850, 7898, 7889, 6277, 9310, 13233, 2974,\n 9510, 13034, 10530, 10482, 13142, 10966, 6442, 9412, 373,\n 12606, 10396, 7789, 9847, 8402, 6271, 9648, 10391, 13337,\n 11603, 4149, 1381, 9474, 5379, 6919, 4711, 7138, 11589,\n 12123, 2363, 7014, 3985, 2702, 10748, 1482, 3766, 5824,\n 2772, 12204, 11808, 911, 8036, 9819, 75, 10875, 11343,\n 12350, 4835, 7987, 6984, 3781, 1061, 11310, 12713, 5794,\n 28, 2214, 10568, 6254, 9612, 2774, 500, 6862, 1199,\n 9173, 11518, 8121, 3710, 2438, 2623, 12951, 3977, 4413,\n 7464, 6657, 800, 9732, 3045, 10514, 1291, 386, 12300,\n 12180, 12580, 6046, 5122, 3076, 8912, 8934, 9736, 10338,\n 10312, 8664, 1709, 7928, 7622, 6270, 8648, 8318, 10087,\n 8677, 9789, 527, 2083, 1425, 11698, 12390, 5112, 6410,\n 1029, 10201, 120, 6565, 9717, 10706, 1747, 6951, 2298,\n 1925, 7860, 2750, 2439, 7261, 8890, 4387, 8224, 2394,\n 7703, 3891, 11087, 909, 9419, 12533, 2409, 12442, 11714,\n 1653, 11203, 2577, 4195, 2384, 8399, 11726, 13210, 3265,\n 85, 4627, 2762, 7578, 10624, 12498, 1398, 3239, 6716,\n 9784, 7145, 1054, 328, 2254, 13055, 8413, 1033, 2283,\n 11351, 7, 975, 9172, 11618, 12468, 1936, 7037, 5327,\n 509, 2720, 9935, 12570, 1100, 9884, 11860, 10226, 13052,\n 11689, 12284, 12721, 4225, 4668, 4095, 3319, 9667, 9384,\n 4666, 8148, 7812, 854, 9365, 7147, 8452, 9368, 6185,\n 10130, 105, 5734, 8227, 8948, 5034, 10799, 2117, 10113,\n 9238, 1634, 6735, 3751, 8012, 6097, 9192, 12659, 2477,\n 12115, 8857, 92, 8772, 9417, 12937, 1827, 929, 8261,\n 9980, 5575, 5993, 1864, 2795, 11695, 10802, 7590, 5921,\n 7262, 3429, 3882, 6768, 4590, 5992, 13086, 2618, 11104,\n 11940, 7386, 4038, 1948, 5855, 6020, 5832, 2592, 6813,\n 926, 11628, 4127, 697, 4307, 9946, 4924, 12260, 8907,\n 13250, 7260, 10512, 167, 11835, 11545, 184, 8454, 4613,\n 7057, 5837, 6618, 5558, 3062, 4140, 312, 13161, 1050,\n 3974, 1951, 9920, 2810, 3017, 6315, 5101, 3152, 7666,\n 10824, 948, 7413, 1328, 13113, 2870, 8682, 11231, 4276,\n 7462, 11373, 10200, 4421, 1788, 1161, 1178, 9875, 5177,\n 5144, 1865, 13106, 12791, 9846, 4552, 11403, 5767, 7843,\n 776, 7327, 13131, 8958, 7049, 12381, 3831, 11115, 3355,\n 11270, 8902, 9247, 5421, 3235, 12220, 1146, 3564, 8774,\n 2309, 9366, 43, 2700, 11388, 2201, 5782, 8378, 5894,\n 7877, 3970, 9704, 12071, 10258, 3896, 7130, 10224, 2896,\n 10664, 8119, 11190, 7412, 1457, 9746, 5601, 13027, 3122,\n 10085, 1160, 6807, 6760, 5382, 5642, 1249, 4864, 1336,\n 6952, 9433, 1453, 7873, 9373, 1793, 7347, 8831, 3593,\n 12909, 701, 7863, 2874, 9897, 5347, 12985, 613, 1461,\n 6712, 2695, 13070, 11821, 5148, 1543, 5821, 1448, 3642,\n 10078, 11240, 8152, 7301, 5470, 1075, 10493, 292, 2132,\n 10967, 5018, 9614, 5403, 9202, 11663, 13068, 9780, 13311,\n 11830, 9248, 8612, 498, 4229, 1226, 8449, 1396, 6824,\n 10852, 7195, 4609, 3847, 10304, 10208, 10484, 2142, 6281,\n 11840, 9156, 6494, 8421, 7649, 10726, 8572, 900, 11709,\n 4897, 5905, 3238, 9966, 2114, 13004, 11355, 8804, 11319,\n 8553, 5266, 5933, 2775, 1072, 11801, 11958, 883, 4724,\n 5430, 5963, 1259, 6826, 4651, 3313, 5051, 4293, 3298,\n 13390, 617, 12741, 10091, 6846, 6498, 4272, 3700, 6273,\n 264, 5211, 2341, 9224, 12452, 5770, 4396, 6881, 10134,\n 8801, 11453, 389, 4210, 10238, 
1318, 11051, 7267, 2368,\n 4045, 8112, 6957, 10687, 7246, 4256, 11976, 11959, 855,\n 13375, 13058, 2575, 4086, 6253, 10018, 9339, 409, 8924,\n 10131, 1847, 2338, 9971, 12314, 12600, 1243, 73, 11468,\n 9783, 3567, 9227, 12259, 9493, 10111, 3352, 22, 6349,\n 3868, 2622, 2845, 5220, 11139, 4478, 12076, 10229, 3350,\n 7032, 11419, 12656, 6316, 10349, 12212, 4260, 1217, 13031,\n 2532, 1797, 8374, 6311, 2397, 1687, 12531, 318, 1818,\n 5084, 4147, 4503, 11642, 1456, 2396, 7881, 12684, 252,\n 10559, 8308, 3153, 9439, 10429, 10713, 3180, 5878, 3274,\n 1312, 12908, 5352, 970, 1477, 6914, 10340, 3498, 7844,\n 4810, 7287, 10041, 7799, 6456, 10210, 774, 9121, 5762,\n 8892, 6213, 1794, 4337, 11429, 8623, 6948, 10617, 1313,\n 8419, 12857, 3547, 10468, 11238, 8250, 11025, 11749, 10161,\n 9596, 8715, 11669, 7300, 2138, 12031, 907, 8773, 5489,\n 7886, 729, 2206, 12823, 3256, 8071, 1439, 8020, 5095,\n 1157, 5065, 3512, 81, 6227, 8274, 8756, 13368, 7142,\n 3762, 1478, 11752, 581, 4740, 4060, 7867, 10024, 4277,\n 10647, 12128, 13397, 3444, 5368, 3145, 10673, 2655, 9559,\n 9141, 2977, 12747, 4621, 7708, 10573, 12573, 847, 9679,\n 2007, 10268, 11948, 102, 12779, 971, 8464, 5947, 510,\n 4895, 5454, 10435, 13257, 5886, 13362, 5305, 3699, 9390,\n 5877, 3428, 13132, 4078, 8802, 10692, 4710, 10180, 279,\n 7359, 5996, 7745, 9856, 3794, 12973, 7726, 4673, 8326,\n 5874, 12905, 2421, 11859, 12377, 6280, 12415, 9506, 97,\n 11685, 7739, 1561, 4242, 12584, 7097, 1210, 2034, 4063,\n 12633, 1725, 8999, 11317, 6453, 12333, 7900, 4932, 2854,\n 6560, 2517, 8061, 9236, 6222, 12367, 3216, 10141, 904,\n 7201, 7882, 7930, 6662, 2925, 13206, 1046, 790, 11655,\n 1231, 1833, 10701, 12807, 9379, 10314, 628, 5189, 1708,\n 1005, 6259, 9065, 11716, 7477, 11540, 13199, 7466, 2326,\n 2091, 978, 8178, 9097, 11839, 3937, 3028, 3245, 4936,\n 8795, 6673, 11387, 9336, 8373, 6022, 11320, 12246, 4505,\n 10133, 3800, 9494, 9134, 4934, 7436, 1140, 8684, 2285,\n 5816, 11851, 5604, 12216, 10029, 8044, 6701, 11995, 12899,\n 6581, 5251, 13214, 11111, 1900, 8276, 12280, 8229, 6402,\n 11083, 10395, 3317, 6256, 3167, 8614, 12319, 4224, 4164,\n 10149, 4863, 5849, 1786, 2329, 6907, 4037, 2743, 2928,\n 3012, 9330, 1557, 10821, 10385, 538, 4832, 9800, 11649,\n 5732, 10066, 6717, 10691, 8943, 4388, 548, 1868, 9721,\n 9399, 2553, 6842, 2528, 4486, 3101, 4468, 3947, 10595,\n 5830, 4482, 11555, 9642, 7535, 13098, 6389, 13163, 9646,\n 9657, 6792, 1850, 735, 5418, 767, 10308, 5920, 5503,\n 2653, 6825, 7965, 10511, 5672, 3597, 3660, 4761, 3813,\n 9327, 555, 4090, 2269, 1324, 3779, 1644, 243, 1423,\n 12967, 8928, 2956, 9340, 10136, 7400, 13139, 1085, 9929,\n 6721, 6810, 9973, 13197, 7761, 5314, 4377, 4841, 7644,\n 6942, 8191, 13341, 2154, 3571, 6945, 5063, 12380, 4783,\n 11594, 3615, 2541, 3160, 4641, 8135, 9064, 2084, 10467,\n 13064, 8165, 1856, 5165, 3420, 554, 7288, 12889, 12288,\n 1744, 11325, 10356, 13365, 3961, 6217, 13121, 12222, 10440,\n 1012, 8821, 10036, 9317, 5042, 4130, 5448, 1663, 11612,\n 12345, 3627, 3613, 2185, 12003, 12399, 11657, 13079, 8251,\n 10810, 2573, 3472, 451, 4974, 2448, 9147, 1837, 2566,\n 5815, 11552, 5656, 327, 7164, 10023, 8930, 7709, 4653,\n 5939, 653, 4566, 13135, 7841, 11323, 11259, 10175, 9887,\n 11944, 1712, 12998, 4798, 2973, 2444, 6960, 4509, 2982,\n 4027, 5451, 10979, 9186, 476, 10590, 2137, 9114, 1070,\n 4967, 9371, 7010, 8228, 2092, 9249, 7440, 168, 9208,\n 6194, 3136, 1949, 12282, 7152, 8388, 13152, 10118, 5876,\n 13040, 2628, 11200, 12841, 5509, 6533, 126, 11900, 3612,\n 5746, 4652, 10168, 4214, 10158, 3536, 9779, 4940, 
7711,\n 3252, 1570, 8073, 7851, 4559, 12490, 5080, 9652, 6628,\n 7473, 1257, 7680, 965, 10743, 10583, 1196, 7040, 6189,\n 2560, 12461, 10010, 12038, 3285, 505, 5791, 13069, 2301,\n 5676, 12810, 7248, 10797, 3967, 262, 2869, 2987, 937,\n 5651, 4989, 9591, 4318, 9898, 12976, 7392, 2445, 12896,\n 3801, 13030, 2822, 115, 13387, 7625, 10759, 9958, 12800,\n 7598, 11024, 8288, 2237, 12897, 652, 10981, 9146, 10310,\n 1656, 2094, 8932, 6284, 5653, 5464, 6230, 8749, 5799,\n 12781, 4853, 10459, 2494, 8961, 10192, 10608, 10217, 4446,\n 9632, 8124, 1859, 7810, 12927, 12080, 8331, 9530, 149,\n 3358, 6043, 12343, 5113, 7934, 10521, 146, 514, 3243,\n 37, 10922, 5778, 8576, 1894, 9550, 7768, 5564, 2574,\n 308, 3768, 4399, 6975, 1373, 9634, 11590, 9999, 5274,\n 1732, 5616, 3214, 8116, 12738, 4879, 2537, 685, 10448,\n 9768, 5984, 8230, 8933, 5699, 689, 13138, 10622, 3681,\n 12318, 11437, 11216, 3928, 5527, 4732, 746, 2161, 2001,\n 8361, 10628, 2053, 10935, 11554])}\n\ndef _drop_skew_params(dictionary):\n return {k: v for k, v in dictionary.items() if 'skew' not in k}\n\n\n@config_enumerate\ndef sine_model(num_mix_comp=2):\n # Mixture prior\n mix_weights = pyro.sample('mix_weights', Dirichlet(torch.ones((num_mix_comp,))))\n\n # Hprior BvM\n # Bayesian Inference and Decision Theory by Kathryn Blackmond Laskey\n beta_mean_phi = pyro.sample('beta_mean_phi', Uniform(0., 1.))\n beta_prec_phi = pyro.sample('beta_prec_phi', Gamma(1., 1. / 20.)) # shape, rate\n halpha_phi = beta_mean_phi * beta_prec_phi\n beta_mean_psi = pyro.sample('beta_mean_psi', Uniform(0, 1.))\n beta_prec_psi = pyro.sample('beta_prec_psi', Gamma(1., 1. / 20.)) # shape, rate\n halpha_psi = beta_mean_psi * beta_prec_psi\n\n with pyro.plate('mixture', num_mix_comp):\n # BvM priors\n phi_loc = pyro.sample('phi_loc', VonMises(pi, 2.))\n psi_loc = pyro.sample('psi_loc', VonMises(-pi/2, .2))\n phi_conc = pyro.sample('phi_conc', Beta(halpha_phi, beta_prec_phi - halpha_phi))\n psi_conc = pyro.sample('psi_conc', Beta(halpha_psi, beta_prec_psi - halpha_psi))\n corr_scale = pyro.sample('corr_scale', Beta(2., 15.))\n\n with pyro.plate('obs_plate'):\n assign = pyro.sample('mix_comp', Categorical(mix_weights), )\n bvm = SineBivariateVonMises(phi_loc=phi_loc[assign], psi_loc=psi_loc[assign],\n phi_concentration=150 * phi_conc[assign],\n psi_concentration=150 * psi_conc[assign],\n weighted_correlation=corr_scale[assign])\n return pyro.sample('phi_psi', bvm)\n\n\n@config_enumerate\ndef ss_model(num_mix_comp=2):\n # Mixture prior\n mix_weights = pyro.sample('mix_weights', Dirichlet(torch.ones((num_mix_comp,))))\n\n # Hprior BvM\n # Bayesian Inference and Decision Theory by Kathryn Blackmond Laskey\n beta_mean_phi = pyro.sample('beta_mean_phi', Uniform(0., 1.))\n beta_prec_phi = pyro.sample('beta_prec_phi', Gamma(1., 1 / 20.)) # shape, rate\n halpha_phi = beta_mean_phi * beta_prec_phi\n beta_mean_psi = pyro.sample('beta_mean_psi', Uniform(0, 1.))\n beta_prec_psi = pyro.sample('beta_prec_psi', Gamma(1., 1 / 20.)) # shape, rate\n halpha_psi = beta_mean_psi * beta_prec_psi\n\n with pyro.plate('mixture', num_mix_comp):\n # BvM priors\n phi_loc = pyro.sample('phi_loc', VonMises(pi, 2.))\n psi_loc = pyro.sample('psi_loc', VonMises(-pi/2, .2))\n phi_conc = pyro.sample('phi_conc', Beta(halpha_phi, beta_prec_phi - halpha_phi))\n psi_conc = pyro.sample('psi_conc', Beta(halpha_psi, beta_prec_psi - halpha_psi))\n corr_scale = pyro.sample('corr_scale', Beta(2., 15.))\n\n # SS prior\n skew_phi = pyro.sample('skew_phi', Uniform(-1., 1.))\n psi_bound = 1 - skew_phi.abs()\n 
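# psi_bound caps the second skewness weight so |skew_phi| + psi_bound * |skew_psi| <= 1, keeping the SineSkewed density valid.\n 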
skew_psi = pyro.sample('skew_psi', Uniform(-1., 1.))\n skewness = torch.stack((skew_phi, psi_bound * skew_psi), dim=-1)\n assert skewness.shape == (num_mix_comp, 2)\n\n with pyro.plate('obs_plate'):\n assign = pyro.sample('mix_comp', Categorical(mix_weights), )\n bvm = SineBivariateVonMises(phi_loc=phi_loc[assign], psi_loc=psi_loc[assign],\n phi_concentration=150 * phi_conc[assign],\n psi_concentration=150 * psi_conc[assign],\n weighted_correlation=corr_scale[assign])\n return pyro.sample('phi_psi', SineSkewed(bvm, skewness[assign]))\n\n\ndef cmodel(model, angles, num_mix_comp=2):\n poutine.condition(model, data={'phi_psi': angles})(num_mix_comp)\n\n\ndef fetch_aa_dihedrals(split='train', subsample_to=1_000_000, shuffle=None):\n # Load (phi, psi) dihedral pairs grouped by amino acid from the processed 9-mer fragments.\n data = pickle.load(open('data/9mer_fragments_processed.pkl', 'rb'))[split]['sequences']\n data_aa = np.argmax(data[..., :20], -1)\n data = {aa: data[..., -2:][data_aa == i] for i, aa in enumerate(AMINO_ACIDS)}\n if shuffle is None:\n # No permutation supplied: shuffle each amino acid's rows in place.\n for aa_data in data.values():\n np.random.shuffle(aa_data)\n else:\n # Assumes `shuffle` maps each amino acid to a permutation of its row indices.\n data = {aa: aa_data[shuffle[aa]] for aa, aa_data in data.items()}\n data = {aa: aa_data[:min(subsample_to, aa_data.shape[0])] for aa, aa_data in data.items()}\n data = {aa: torch.tensor(aa_data, dtype=torch.float) for aa, aa_data in data.items()}\n return data\n\n\ndef multiple_formatter(denominator=2, number=np.pi, latex=r'\pi'):\n # Tick formatter that renders multiples of `number` (pi by default) as LaTeX fractions.\n def gcd(a, b):\n while b:\n a, b = b, a % b\n return a\n\n def _multiple_formatter(x, pos):\n den = denominator\n num = int(np.rint(den * x / number))\n com = gcd(num, den)\n (num, den) = (int(num / com), int(den / com))\n if den == 1:\n if num == 0:\n return r'$0$'\n if num == 1:\n return r'$%s$' % latex\n elif num == -1:\n return r'$-%s$' % latex\n else:\n return r'$%s%s$' % (num, latex)\n else:\n if num == 1:\n return r'$\\frac{%s}{%s}$' % (latex, den)\n elif num == -1:\n return r'$\\frac{-%s}{%s}$' % (latex, den)\n else:\n return r'$\\frac{%s%s}{%s}$' % (num, latex, den)\n\n return _multiple_formatter\n\n\ndef kde_ramachandran_plot(pred_data, data, aas, file_name='kde_rama_pred.png'):\n # Ramachandran-style plot: observed angles as a scatter, predictive samples as a hexbin density.\n means = {'S': [[2.87, 3.13, -1.34, -1.12, 0.93, 0.99, -2.46, 1.23, 1.35, -1.89, 1.02],\n [1.36, 2.05, 1.31, -0.60, 0.74, -2.05, -3.14, 0.16, -0.12, 2.93, -2.57]],\n 'P': [[-1.08, 1.51, -1.01, -0.73, 2.41, 2.15, -1.45, 3.00, 2.45, -1.23, -1.00],\n [-0.49, -1.56, 2.45, -0.87, 1.70, -0.36, -3.12, 0.44, -0.33, 2.69, 0.30]],\n 'G': [[2.18, 1.70, -1.43, -1.10, 1.34, 2.40, -3.01, 1.60, 1.19, -2.08, 1.37],\n [-0.22, -0.27, 1.43, -0.65, -2.76, -3.03, -3.11, 0.03, 0.55, 3.12, 0.21]]}\n fig, axs = plt.subplots(1, len(aas))\n levels = {'G': 8, 'P': 3, 'S': 8}\n for ax, aa in zip(axs, aas):\n aa_data = data[aa]\n\n ax.scatter(aa_data[:, 0], aa_data[:, 1], color='k', s=1)\n aa_data = pred_data[aa]\n ax.hexbin(aa_data[:, 0], aa_data[:, 1], cmap=\"Purples\", extent=[-math.pi, math.pi, -math.pi, math.pi], alpha=.5)\n if aa in means:\n ax.scatter(means[aa][0], means[aa][1], marker='x', color='k', s=.5)\n\n # Label the axes and format x ticks as multiples of pi.\n ax.set_xlabel(r'$\phi$')\n ax.set_xlim([-math.pi, math.pi])\n ax.set_ylim([-math.pi, math.pi])\n ax.set_aspect('equal', 'box')\n\n ax.xaxis.set_major_locator(plt.MultipleLocator(np.pi / 2))\n ax.xaxis.set_minor_locator(plt.MultipleLocator(np.pi / 12))\n ax.xaxis.set_major_formatter(plt.FuncFormatter(multiple_formatter()))\n\n axs[0].set_ylabel(r'$\psi$')\n axs[0].yaxis.set_major_locator(plt.MultipleLocator(np.pi / 2))\n axs[0].yaxis.set_minor_locator(plt.MultipleLocator(np.pi / 12))\n 
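# Only the first panel keeps y tick labels, rendered as multiples of pi.\n 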
axs[0].yaxis.set_major_formatter(plt.FuncFormatter(multiple_formatter()))\n axs[1].tick_params(labelleft=False)\n axs[2].tick_params(labelleft=False)\n if file_name:\n viz_dir = Path(__file__).parent.parent / 'viz'\n viz_dir.mkdir(exist_ok=True)\n fig.tight_layout()\n plt.savefig(str(viz_dir / file_name), dvi=300, bbox_inches='tight', transparent=True)\n else:\n fig.tight_layout()\n plt.show()\n plt.clf()\n\n\ndef main(aas=('S', 'G', 'P'), use_cuda=False):\n num_mix_comp = 11\n if torch.cuda.is_available() and use_cuda:\n device_context = tensors_default_to(\"cuda\")\n device = \"cuda\"\n else:\n device_context = tensors_default_to(\"cpu\")\n device = \"cpu\"\n\n pred_datas = {}\n with device_context:\n data = fetch_aa_dihedrals(subsample_to=5000, shuffle=shuffle)\n for aa in aas:\n chain_file = Path(__file__).parent / 'runs' / f'ssbvm_bmixture_aa{aa}_comp{num_mix_comp}_steps1000.pkl'\n\n posterior_samples = {k: {kk: {kkk: torch.tensor(vvv).detach().to(device) for kkk, vvv in vv.items()}\n for kk, vv in v.items()}\n for k, v in pickle.load(chain_file.open('rb')).items()}\n\n predictive = Predictive(sine_model, posterior_samples[aa]['sine'], return_sites=('phi_psi',))\n pred_data = []\n fail = 0\n for _ in range(2): # TODO: parallelize\n try:\n pred_data.append(predictive(num_mix_comp)['phi_psi'].squeeze())\n print('success')\n except Exception as e:\n print(e)\n fail += 1\n pred_datas[aa] = torch.stack(pred_data).view(-1, 2).to('cpu')\n print(f'failed samples {fail}')\n\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n kde_ramachandran_plot(pred_datas, data, aas)\n\n\nif __name__ == '__main__':\n main()\n" ]
[ [ "numpy.array", "torch.stack", "numpy.set_printoptions", "numpy.rint", "numpy.random.shuffle", "torch.ones", "matplotlib.pyplot.MultipleLocator", "torch.cuda.is_available", "numpy.argmax", "torch.tensor", "matplotlib.pyplot.show", "numpy.arange", "matplotlib.pyplot.clf" ] ]
vkazei/autokeras
[ "c66fc7be562ee4f704adedcfd935cb9522cf2e1d" ]
[ "autokeras/tuner.py" ]
[ "import os\nimport copy\nimport inspect\n\nimport kerastuner\nimport tensorflow as tf\n\n\nclass AutoTuner(kerastuner.Tuner):\n \"\"\"Modified KerasTuner base class to include preprocessing layers.\"\"\"\n\n def run_trial(self, trial, hp, fit_args, fit_kwargs):\n \"\"\"Preprocess the x and y before calling the base run_trial.\"\"\"\n # Initialize new fit kwargs for the current trial.\n new_fit_kwargs = copy.copy(fit_kwargs)\n new_fit_kwargs.update(\n dict(zip(inspect.getfullargspec(tf.keras.Model.fit).args, fit_args)))\n\n # Preprocess the dataset and set the shapes of the HyperNodes.\n self.hypermodel.hyper_build(hp)\n dataset, validation_data = self.hypermodel.preprocess(\n hp,\n new_fit_kwargs.get('x', None),\n new_fit_kwargs.get('validation_data', None),\n fit=True)\n self._save_preprocessors(trial.trial_id, trial.directory)\n\n # Batching\n batch_size = new_fit_kwargs.get('batch_size', 32)\n dataset = dataset.batch(batch_size)\n validation_data = validation_data.batch(batch_size)\n\n # Update the new fit kwargs values\n new_fit_kwargs['x'] = dataset\n new_fit_kwargs['validation_data'] = validation_data\n new_fit_kwargs['batch_size'] = None\n new_fit_kwargs['y'] = None\n\n # Add earlystopping callback if necessary\n callbacks = new_fit_kwargs.get('callbacks', [])\n new_fit_kwargs['callbacks'] = self.add_earlystopping_callback(callbacks)\n\n super().run_trial(trial, hp, [], new_fit_kwargs)\n\n def get_best_hp(self, num_models=1):\n \"\"\"Returns hyperparameters used to build the best model(s).\n\n # Arguments\n num_models (int, optional): Number of best models, whose building\n HyperParameters to return. Models will be returned in sorted order\n starting from the best. Defaults to 1.\n\n # Returns\n List of HyperParameter instances.\n \"\"\"\n best_trials = self._get_best_trials(num_models)\n return [trial.hyperparameters.copy()\n for trial in best_trials]\n\n def _save_preprocessors(self, trial_id, base_directory='.'):\n filename = '%s-preprocessors' % trial_id\n path = os.path.join(base_directory, filename)\n self.hypermodel.save_preprocessors(path)\n\n def get_best_trials(self, num_trials=1):\n return super()._get_best_trials(num_trials)\n\n def load_trial(self, trial):\n self.hypermodel.hyper_build(trial.hyperparameters)\n filename = '%s-preprocessors' % trial.trial_id\n path = os.path.join(trial.directory, filename)\n self.hypermodel.load_preprocessors(path)\n\n @staticmethod\n def add_earlystopping_callback(callbacks):\n if not callbacks:\n callbacks = []\n\n try:\n callbacks = copy.deepcopy(callbacks)\n except:\n raise ValueError(\n 'All callbacks used during a search '\n 'should be deep-copyable (since they are '\n 'reused across executions). '\n 'It is not possible to do `copy.deepcopy(%s)`' %\n (callbacks,))\n\n if not [callback for callback in callbacks\n if isinstance(callback, tf.keras.callbacks.EarlyStopping)]:\n # The patience is set to 30 based on human experience.\n callbacks.append(tf.keras.callbacks.EarlyStopping(patience=30))\n\n return callbacks\n\n\nclass RandomSearch(AutoTuner, kerastuner.RandomSearch):\n \"\"\"KerasTuner RandomSearch with preprocessing layer tuning.\"\"\"\n pass\n\n\nclass HyperBand(AutoTuner, kerastuner.Hyperband):\n \"\"\"KerasTuner Hyperband with preprocessing layer tuning.\"\"\"\n pass\n" ]
[ [ "tensorflow.keras.callbacks.EarlyStopping" ] ]
theroyakash/MedicalDataLeakageInspector
[ "ef43303a1cafbb1e8f2068b69987d2f9a1d1ca51" ]
[ "tests.py" ]
[ "import unittest\nimport pandas as pd\nimport LeakageInspector\n\nclass Test(unittest.TestCase):\n def test_case_1(self):\n df1 = pd.DataFrame({'patient_id': [0, 1, 2]})\n df2 = pd.DataFrame({'patient_id': [2, 3, 4]})\n self.assertEqual(LeakageInspector.LeakageInspector(df1, df2, 'patient_id').check_for_leakage(), True)\n\n def test_case_2(self):\n df1 = pd.DataFrame({'patient_id': [0, 1, 2]})\n df2 = pd.DataFrame({'patient_id': [3, 4, 5]})\n self.assertEqual(LeakageInspector.LeakageInspector(df1, df2, 'patient_id').check_for_leakage(), False)\n\nif __name__ == '__main__':\n unittest.main()" ]
[ [ "pandas.DataFrame" ] ]
PacktPublishing/TensorFlow-for-Machine-Learning-Solutions-
[ "3f258ee117bffaf18f5420fc4e6eefaab604fa02" ]
[ "Section 1/How TensorFlow Works.py" ]
[ "import tensorflow as tf\nfrom tensorflow.python.framework import ops\nops.reset_default_graph()\n\nsess = tf.Session()\n\nmy_tensor = tf.zeros([1,20])\n\nsess.run(my_tensor)\n\nmy_var = tf.Variable(tf.zeros([1,20]))\n\nsess.run(my_var.initializer)\nsess.run(my_var)\n\nrow_dim = 2\ncol_dim = 3\n\nzero_var = tf.Variable(tf.zeros([row_dim, col_dim]))\nones_var = tf.Variable(tf.ones([row_dim, col_dim]))\n\nsess.run(zero_var.initializer)\nsess.run(ones_var.initializer)\nprint(sess.run(zero_var))\nprint(sess.run(ones_var))\n\nzero_similar = tf.Variable(tf.zeros_like(zero_var))\nones_similar = tf.Variable(tf.ones_like(ones_var))\nsess.run(ones_similar.initializer)\nsess.run(zero_similar.initializer)\nprint(sess.run(ones_similar))\nprint(sess.run(zero_similar))\n\nfill_var = tf.Variable(tf.fill([row_dim, col_dim], -1))\nsess.run(fill_var.initializer)\nprint(sess.run(fill_var))\n\nconst_var = tf.Variable(tf.constant([8, 6, 7, 5, 3, 0, 9]))\nconst_fill_var = tf.Variable(tf.constant(-1, shape=[row_dim, col_dim]))\nsess.run(const_var.initializer)\nsess.run(const_fill_var.initializer)\nprint(sess.run(const_var))\nprint(sess.run(const_fill_var))\n\nlinear_var = tf.Variable(tf.linspace(start=0.0, stop=1.0, num=3)) # Generates [0.0, 0.5, 1.0] includes the end\nsequence_var = tf.Variable(tf.range(start=6, limit=15, delta=3)) # Generates [6, 9, 12] doesn't include the end\nsess.run(linear_var.initializer)\nsess.run(sequence_var.initializer)\nprint(sess.run(linear_var))\nprint(sess.run(sequence_var))\n\nrnorm_var = tf.random_normal([row_dim, col_dim], mean=0.0, stddev=1.0)\nrunif_var = tf.random_uniform([row_dim, col_dim], minval=0, maxval=4)\nprint(sess.run(rnorm_var))\nprint(sess.run(runif_var))\n\nops.reset_default_graph()\nsess = tf.Session()\nmy_var = tf.Variable(tf.zeros([1,20]))\nmerged = tf.summary.merge_all()\nwriter = tf.summary.FileWriter(\"./logs\", graph=sess.graph)\ninitialize_op = tf.global_variables_initializer()\nsess.run(initialize_op)\n" ]
[ [ "tensorflow.zeros", "tensorflow.range", "tensorflow.linspace", "tensorflow.random_uniform", "tensorflow.Session", "tensorflow.ones", "tensorflow.ones_like", "tensorflow.fill", "tensorflow.constant", "tensorflow.zeros_like", "tensorflow.python.framework.ops.reset_default_graph", "tensorflow.summary.merge_all", "tensorflow.summary.FileWriter", "tensorflow.global_variables_initializer", "tensorflow.random_normal" ] ]
TortillasAlfred/rl-starter-files
[ "a781c2f145d72ebddb2185df5b0f9b5348f35ad1" ]
[ "scripts/visualize.py" ]
[ "import argparse\nimport time\nimport numpy\nimport torch\n\nimport utils\n\n\n# Parse arguments\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\n \"--model\", required=True, help=\"name of the trained model (REQUIRED)\"\n)\nparser.add_argument(\"--seed\", type=int, default=0, help=\"random seed (default: 0)\")\nparser.add_argument(\n \"--shift\",\n type=int,\n default=0,\n help=\"number of times the environment is reset at the beginning (default: 0)\",\n)\nparser.add_argument(\n \"--argmax\",\n action=\"store_true\",\n default=False,\n help=\"select the action with highest probability (default: False)\",\n)\nparser.add_argument(\n \"--pause\",\n type=float,\n default=0.1,\n help=\"pause duration between two consequent actions of the agent (default: 0.1)\",\n)\nparser.add_argument(\n \"--gif\", type=str, default=None, help=\"store output as gif with the given filename\"\n)\nparser.add_argument(\n \"--episodes\", type=int, default=1000000, help=\"number of episodes to visualize\"\n)\nparser.add_argument(\n \"--memory\", action=\"store_true\", default=False, help=\"add a LSTM to the model\"\n)\nparser.add_argument(\n \"--text\", action=\"store_true\", default=False, help=\"add a GRU to the model\"\n)\n\nargs = parser.parse_args()\n\n# Set seed for all randomness sources\n\nutils.seed(args.seed)\n\n# Set device\n\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nprint(f\"Device: {device}\\n\")\n\n# Load environment\n\nenv = utils.get_stochastic_env()\nfor _ in range(args.shift):\n env.reset()\nprint(\"Environment loaded\\n\")\n\n# Load agent\n\nmodel_dir = utils.get_model_dir(args.model)\npolicy_acmodel = utils.ACModel(env.observation_space, env.action_space)\nagent = utils.Agent(\n policy_acmodel,\n env.observation_space,\n model_dir,\n device=device,\n argmax=True,\n num_envs=1,\n pretrained=True,\n)\nprint(\"Agent loaded\\n\")\n\n# Run the agent\n\nif args.gif:\n from array2gif import write_gif\n\n frames = []\n\n# Create a window to view the environment\nenv.render(\"human\")\n\nfor episode in range(args.episodes):\n obs = env.reset()\n\n while True:\n env.render(\"human\", highlight=False)\n if args.gif:\n frames.append(numpy.moveaxis(env.render(\"rgb_array\"), 2, 0))\n\n action = agent.get_action(obs)\n obs, reward, done, _ = env.step(-1)\n agent.analyze_feedback(reward, done)\n\n if done or env.window.closed:\n break\n\n if env.window.closed:\n break\n\nif args.gif:\n print(\"Saving gif... \", end=\"\")\n write_gif(numpy.array(frames), args.gif + \".gif\", fps=1 / args.pause)\n print(\"Done.\")\n" ]
[ [ "numpy.array", "torch.cuda.is_available" ] ]
tkkawa/nngp-copy
[ "42a5fb523653c0a213851628bc3ca0b78a2e8d1c" ]
[ "load_dataset.py" ]
[ "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Data loader for NNGP experiments.\n\nLoading MNIST dataset with train/valid/test split as numpy array.\n\nUsage:\nmnist_data = load_dataset.load_mnist(num_train=50000, use_float64=True,\n mean_subtraction=True)\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport copy\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.examples.tutorials.mnist import input_data\n\nflags = tf.app.flags\nFLAGS = flags.FLAGS\n\nflags.DEFINE_string('data_dir', '/tmp/nngp/data/',\n 'Directory for data.')\n\ndef load_mnist(num_train=50000,\n use_float64=False,\n mean_subtraction=False,\n random_roated_labels=False):\n \"\"\"Loads MNIST as numpy array.\"\"\"\n\n data_dir = FLAGS.data_dir\n datasets = input_data.read_data_sets(\n data_dir, False, validation_size=10000, one_hot=True)\n mnist_data = _select_mnist_subset(\n datasets,\n num_train,\n use_float64=use_float64,\n mean_subtraction=mean_subtraction,\n random_roated_labels=random_roated_labels)\n\n return mnist_data\n\n\ndef _select_mnist_subset(datasets,\n num_train=100,\n digits=list(range(10)),\n seed=9999,\n sort_by_class=False,\n use_float64=False,\n mean_subtraction=False,\n random_roated_labels=False):\n \"\"\"Select subset of MNIST and apply preprocessing.\"\"\"\n np.random.seed(seed)\n digits.sort()\n subset = copy.deepcopy(datasets)\n\n num_class = len(digits)\n num_per_class = num_train // num_class\n\n idx_list = np.array([], dtype='uint8')\n\n ys = np.argmax(subset.train.labels, axis=1) # undo one-hot\n\n for digit in digits:\n if datasets.train.num_examples == num_train:\n idx_list = np.concatenate((idx_list, np.where(ys == digit)[0]))\n else:\n idx_list = np.concatenate((idx_list,\n np.where(ys == digit)[0][:num_per_class]))\n if not sort_by_class:\n np.random.shuffle(idx_list)\n\n data_precision = np.float64 if use_float64 else np.float32\n\n train_image = subset.train.images[idx_list][:num_train].astype(data_precision)\n train_label = subset.train.labels[idx_list][:num_train].astype(data_precision)\n valid_image = subset.validation.images.astype(data_precision)\n valid_label = subset.validation.labels.astype(data_precision)\n test_image = subset.test.images.astype(data_precision)\n test_label = subset.test.labels.astype(data_precision)\n\n if sort_by_class:\n train_idx = np.argsort(np.argmax(train_label, axis=1))\n train_image = train_image[train_idx]\n train_label = train_label[train_idx]\n\n if mean_subtraction:\n train_image_mean = np.mean(train_image)\n train_label_mean = np.mean(train_label)\n train_image -= train_image_mean\n train_label -= train_label_mean\n valid_image -= train_image_mean\n valid_label -= train_label_mean\n test_image -= train_image_mean\n test_label -= train_label_mean\n\n if random_roated_labels:\n r, _ = np.linalg.qr(np.random.rand(10, 10))\n train_label = np.dot(train_label, r)\n valid_label = np.dot(valid_label, r)\n test_label = 
np.dot(test_label, r)\n\n return (train_image, train_label,\n valid_image, valid_label,\n test_image, test_label)\n\n" ]
[ [ "numpy.array", "numpy.dot", "numpy.random.rand", "numpy.random.seed", "tensorflow.examples.tutorials.mnist.input_data.read_data_sets", "numpy.random.shuffle", "numpy.mean", "numpy.where", "numpy.argmax" ] ]
Akulen/mangaki-zero
[ "5eb2de06b8684ed948b8b903e9f567f06c35e3ef" ]
[ "zero/als3.py" ]
[ "from zero.recommendation_algorithm import (RecommendationAlgorithm,\n register_algorithm)\nfrom scipy.sparse import coo_matrix\nfrom collections import defaultdict\nimport numpy as np\n\n\n@register_algorithm('als3', {'nb_components': 20})\nclass MangakiALS3(RecommendationAlgorithm):\n '''\n Alternating Least Squares for \"Singular Value Decomposition\" model\n (aka latent factor model)\n\n This implementation was supposed to be shorter and easier to read than\n MangakiALS2, but the performance is slightly worse, maybe because the\n initialization is different.\n (Gaussian instead of uniform; but Zhou's paper suggested a Gaussian\n initialization)\n '''\n def __init__(self, nb_components=20, nb_iterations=20, lambda_=0.1):\n super().__init__()\n self.nb_components = nb_components\n self.nb_iterations = nb_iterations\n self.lambda_ = lambda_\n\n @property\n def is_serializable(self):\n return True\n\n def fit(self, X, y):\n self.init_vars()\n self.bias = y.mean()\n self.matrix, self.matrixT = self.to_dict(X, y)\n self.ratings_of_user, self.ratings_of_work = self.to_sparse(X, y)\n users, works = map(np.unique, self.ratings_of_user.nonzero())\n for nb_iter in range(self.nb_iterations):\n for user_id in users:\n self.fit_user(user_id)\n for work_id in works:\n self.fit_work(work_id)\n self.compute_metrics()\n\n def to_dict(self, X, y):\n matrix = defaultdict(dict)\n matrixT = defaultdict(dict)\n for (user_id, work_id), rating in zip(X, y):\n matrix[user_id][work_id] = rating\n matrixT[work_id][user_id] = rating\n return matrix, matrixT\n\n def to_sparse(self, X, y):\n user_ids, work_ids = zip(*X) # Columns of X\n ratings = coo_matrix((y, (user_ids, work_ids)), shape=(self.nb_users,\n self.nb_works))\n return ratings.tocsr(), ratings.tocsc()\n\n def init_vars(self):\n self.U = np.random.multivariate_normal(\n mean=np.zeros(self.nb_components),\n cov=np.eye(self.nb_components),\n size=self.nb_users)\n self.V = np.random.multivariate_normal(\n mean=np.zeros(self.nb_components),\n cov=np.eye(self.nb_components),\n size=self.nb_works)\n self.W_user = np.random.normal(size=self.nb_users)\n self.W_work = np.random.normal(size=self.nb_works)\n self.bias = 0\n\n def fit_user(self, user_id):\n Ji = np.array(list(self.matrix[user_id].keys()))\n Ri = np.array(list(self.matrix[user_id].values()))\n Ni = Ji.size\n Vi = self.V[Ji]\n Wi = self.W_work[Ji]\n bi = self.W_user[user_id] + self.bias\n Li = self.lambda_ * Ni * np.eye(self.nb_components)\n self.U[user_id] = np.linalg.solve(Vi.T.dot(Vi) + Li,\n (Ri - Wi - bi).dot(Vi))\n self.W_user[user_id] = ((Ri - self.U[user_id].dot(Vi.T) - Wi).mean() /\n (1 + self.lambda_) - self.bias)\n\n def fit_work(self, work_id):\n Ij = np.array(list(self.matrixT[work_id].keys()))\n Rj = np.array(list(self.matrixT[work_id].values()))\n Nj = Ij.size\n Uj = self.U[Ij]\n Wj = self.W_user[Ij]\n bj = self.W_work[work_id] + self.bias\n Lj = self.lambda_ * Nj * np.eye(self.nb_components)\n self.V[work_id] = np.linalg.solve(Uj.T.dot(Uj) + Lj,\n (Rj - Wj - bj).dot(Uj))\n self.W_work[work_id] = ((Rj - self.V[work_id].dot(Uj.T) - Wj).mean() /\n (1 + self.lambda_) - self.bias)\n\n def predict(self, X):\n user_ids, work_ids = zip(*X)\n self.M = (self.U.dot(self.V.T) + self.W_user.reshape(-1, 1) +\n self.W_work.reshape(1, -1) + self.bias)\n return self.M[user_ids, work_ids]\n\n def get_shortname(self):\n return 'als3-%d' % self.nb_components\n" ]
[ [ "scipy.sparse.coo_matrix", "numpy.random.normal", "numpy.zeros", "numpy.eye" ] ]
tensorflow/tfx-common
[ "988549bb50c6174d1a2e2cb4208f355826724842" ]
[ "tfx_bsl/tfxio/tensor_representation_util.py" ]
[ "# Copyright 2019 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"TensorRepresentation utilities.\"\"\"\n\nfrom typing import List, Dict, Iterable, Mapping, Optional, Tuple, Union\n\nfrom absl import logging\nimport numpy as np\nimport tensorflow as tf\nfrom tfx_bsl.arrow import path\nfrom tensorflow_metadata.proto.v0 import schema_pb2\n\nif tf.__version__ < \"2\":\n # TF1 doesn't have tf.io.RaggedFeature.\n IOFeatures = Union[tf.io.VarLenFeature, tf.io.SparseFeature,\n tf.io.FixedLenFeature]\nelse:\n IOFeatures = Union[tf.io.VarLenFeature, tf.io.SparseFeature,\n tf.io.FixedLenFeature, tf.io.RaggedFeature]\n\n_DEFAULT_TENSOR_REPRESENTATION_GROUP = \"\"\n\n_DISQUALIFYING_LIFECYCLE_STAGES = [\n schema_pb2.DEPRECATED, schema_pb2.PLANNED, schema_pb2.ALPHA,\n schema_pb2.DEBUG_ONLY\n]\n\n# The schema proto may not contain this field, which means the legacy logic\n# does not apply.\n_IS_LEGACY_SCHEMA = (\"generate_legacy_feature_spec\" in\n schema_pb2.Schema.DESCRIPTOR.fields_by_name)\n\n_LEGACY_DEFAULT_VALUE_FOR_FEATURE_TYPE = {\n schema_pb2.BYTES:\n schema_pb2.TensorRepresentation.DefaultValue(bytes_value=b\"\"),\n schema_pb2.INT:\n schema_pb2.TensorRepresentation.DefaultValue(int_value=-1),\n schema_pb2.FLOAT:\n schema_pb2.TensorRepresentation.DefaultValue(float_value=-1.0),\n}\n\n_FEATURE_TYPE_TO_TF_TYPE = {\n schema_pb2.BYTES: tf.string,\n schema_pb2.INT: tf.int64,\n schema_pb2.FLOAT: tf.float32,\n}\n\n_DEFAULT_VALUE_KIND_TO_FEATURE_TYPE = {\n \"bytes_value\": schema_pb2.BYTES,\n \"int_value\": schema_pb2.INT,\n \"float_value\": schema_pb2.FLOAT,\n}\n\n\ndef _GetSparseTensorRepresentationUsedColumns(\n sparse_tensor_rep: schema_pb2.TensorRepresentation.SparseTensor\n) -> List[path.ColumnPath]:\n result = [path.ColumnPath(c) for c in sparse_tensor_rep.index_column_names]\n if sparse_tensor_rep.HasField(\"value_column_name\"):\n result.append(path.ColumnPath(sparse_tensor_rep.value_column_name))\n return result\n\n\ndef _GetRaggedTensorRepresentationUsedColumns(\n ragged_tensor_rep: schema_pb2.TensorRepresentation.RaggedTensor\n) -> List[path.ColumnPath]:\n \"\"\"Returns a list of ColumnPaths used by the Ragged TensorRepresentation.\"\"\"\n value_column_path = path.ColumnPath.from_proto(ragged_tensor_rep.feature_path)\n result = [value_column_path]\n for partition in ragged_tensor_rep.partition:\n if partition.HasField(\"row_length\"):\n result.append(value_column_path.parent().child(partition.row_length))\n return result\n\n\n_TENSOR_REPRESENTATION_KIND_TO_COLUMNS_GETTER = {\n \"dense_tensor\":\n lambda tr: [path.ColumnPath(tr.dense_tensor.column_name)],\n \"varlen_sparse_tensor\":\n lambda tr: [path.ColumnPath(tr.varlen_sparse_tensor.column_name)],\n \"sparse_tensor\":\n lambda tr: _GetSparseTensorRepresentationUsedColumns(tr.sparse_tensor),\n \"ragged_tensor\":\n lambda tr: _GetRaggedTensorRepresentationUsedColumns(tr.ragged_tensor),\n None:\n lambda _: [],\n}\n\n_TENSOR_REPRESENTATION_KIND_TO_VALUE_COLUMN_GETTER = {\n \"dense_tensor\":\n lambda tr: 
path.ColumnPath(tr.dense_tensor.column_name),\n \"varlen_sparse_tensor\":\n lambda tr: path.ColumnPath(tr.varlen_sparse_tensor.column_name),\n \"sparse_tensor\":\n lambda tr: path.ColumnPath(tr.sparse_tensor.value_column_name),\n \"ragged_tensor\":\n lambda tr: path.ColumnPath.from_proto(tr.ragged_tensor.feature_path)\n}\n\n\ndef SetTensorRepresentationsInSchema(\n schema: schema_pb2.Schema,\n tensor_representations: Mapping[str, schema_pb2.TensorRepresentation],\n tensor_representation_group_name: str = _DEFAULT_TENSOR_REPRESENTATION_GROUP\n) -> None:\n \"\"\"Sets the TensorRepresentationGroup of the given name to the given value.\"\"\"\n tensor_representation_map = schema.tensor_representation_group[\n tensor_representation_group_name].tensor_representation\n tensor_representation_map.clear()\n for k, v in tensor_representations.items():\n tensor_representation_map[k].CopyFrom(v)\n\n\ndef GetTensorRepresentationsFromSchema(\n schema: schema_pb2.Schema,\n tensor_representation_group_name: str = _DEFAULT_TENSOR_REPRESENTATION_GROUP\n) -> Optional[Dict[str, schema_pb2.TensorRepresentation]]:\n \"\"\"Gets a TensorRepresentationGroup as a dict<tensor_name,rep> from schema.\n\n If the group name is provided, look it up in the schema, otherwise, look for\n the default group.\n\n Args:\n schema: a schema_pb2.Schema.\n tensor_representation_group_name: (optional) the name of the group to look\n for. If not provided, look for the default name.\n\n Returns:\n None if not found. Otherwise a dict with tensor names being keys and\n TensorRepresentation as values.\n \"\"\"\n group = schema.tensor_representation_group.get(\n tensor_representation_group_name)\n if group is None:\n return None\n return dict(group.tensor_representation)\n\n\ndef InferTensorRepresentationsFromSchema(\n schema: schema_pb2.Schema) -> Dict[str, schema_pb2.TensorRepresentation]:\n \"\"\"Infers TensorRepresentations from the schema's Features.\"\"\"\n # TODO(zhuo): Add support for SparseFeature -> SparseTensor representation.\n if _ShouldUseLegacyLogic(schema):\n infer_func = _LegacyInferTensorRepresentationFromSchema\n else:\n infer_func = _InferTensorRepresentationFromSchema\n\n return infer_func(schema)\n\n\ndef InferTensorRepresentationsFromMixedSchema(\n schema: schema_pb2.Schema) -> Dict[str, schema_pb2.TensorRepresentation]:\n \"\"\"Infers TensorRepresentations from schema that has Features and TRs.\"\"\"\n tensor_representations = GetTensorRepresentationsFromSchema(schema)\n inferred_tensor_representations = InferTensorRepresentationsFromSchema(schema)\n if tensor_representations is None:\n return inferred_tensor_representations\n # Only keep inferred TRs that do not represent source columns. Existing TRs\n # are preferred over the inferred in case of name collisions.\n source_columns = set()\n for tensor_representation in tensor_representations.values():\n source_columns.update(\n str(path) for path in GetSourceColumnsFromTensorRepresentation(\n tensor_representation))\n for name, tensor_representation in inferred_tensor_representations.items():\n if name in tensor_representations:\n logging.warning(\n \"Feature name %s conflicts with tensor representation name in the \"\n \"same schema. 
Ignoring the feature and using the tensor \"\n          \"representation.\", name)\n    elif name not in source_columns:\n      tensor_representations[name] = tensor_representation\n  return tensor_representations\n\n\ndef GetSourceColumnsFromTensorRepresentation(\n    tensor_representation: schema_pb2.TensorRepresentation\n) -> List[path.ColumnPath]:\n  \"\"\"Returns columns required by the given TensorRepresentation.\"\"\"\n\n  return _TENSOR_REPRESENTATION_KIND_TO_COLUMNS_GETTER[\n      tensor_representation.WhichOneof(\"kind\")](\n          tensor_representation)\n\n\ndef GetSourceValueColumnFromTensorRepresentation(\n    tensor_representation: schema_pb2.TensorRepresentation) -> path.ColumnPath:\n  \"\"\"Returns the name of the value column from the TensorRepresentation.\n\n  Each tensor representation has one or more value columns. A value column\n  is a column that contributes to the values of a (composite) tensor. A\n  composite tensor may consist of data from multiple columns, with one\n  providing the values and the others providing structural information.\n\n  Args:\n    tensor_representation: The tensor representation that contains tensor\n      construction information.\n\n  Raises:\n    KeyError: if the tensor representation's \"kind\" is invalid. Valid \"kinds\"\n      are dense_tensor, varlen_sparse_tensor, sparse_tensor, or ragged_tensor.\n  \"\"\"\n  return _TENSOR_REPRESENTATION_KIND_TO_VALUE_COLUMN_GETTER[\n      tensor_representation.WhichOneof(\"kind\")](\n          tensor_representation)\n\n\ndef CreateTfExampleParserConfig(\n    tensor_representation: schema_pb2.TensorRepresentation,\n    feature_type: schema_pb2.FeatureType\n) -> (\"Union[tf.io.VarLenFeature, tf.io.SparseFeature, tf.io.FixedLenFeature, \"\n      \"tf.io.RaggedFeature]\"):\n  \"\"\"Creates a Feature Configuration that is used for tf.io.parse_example.\n\n  Args:\n    tensor_representation: The tensor representation to convert to a Feature.\n    feature_type: The schema_pb2.FeatureType of the given feature. 
The supported\n types are listed in _FEATURE_TYPE_TO_TF_TYPE.\n\n Returns:\n Either a `tf.io.FixedLenFeature`, `tf.io.VarLenFeature`, or\n `tf.io.SparseFeature`.\n\n Raises:\n ValueError: If the tensor_representation cannot be converted to a Feature.\n NotImplementedError: For ragged_tensor in tensor_representation.\n \"\"\"\n value_dtype = _FEATURE_TYPE_TO_TF_TYPE.get(feature_type, None)\n if value_dtype is None:\n raise ValueError(\n \"The feature_type: {} is not supported.\".format(feature_type))\n\n tensor_representation_kind = tensor_representation.WhichOneof(\"kind\")\n if tensor_representation_kind == \"dense_tensor\":\n dense_tensor_rep = tensor_representation.dense_tensor\n shape = _GetDimsFromFixedShape(dense_tensor_rep.shape)\n default_value = None\n if dense_tensor_rep.HasField(\"default_value\"):\n default_value = _GetDefaultValuesList(shape, feature_type,\n dense_tensor_rep.default_value)\n return tf.io.FixedLenFeature(\n shape=shape, dtype=value_dtype, default_value=default_value)\n elif tensor_representation_kind == \"varlen_sparse_tensor\":\n return tf.io.VarLenFeature(dtype=value_dtype)\n elif tensor_representation_kind == \"sparse_tensor\":\n sparse_tensor_rep = tensor_representation.sparse_tensor\n return tf.io.SparseFeature(\n index_key=sparse_tensor_rep.index_column_names,\n value_key=sparse_tensor_rep.value_column_name,\n dtype=value_dtype,\n size=_GetDimsFromFixedShape(sparse_tensor_rep.dense_shape))\n elif tensor_representation_kind == \"ragged_tensor\":\n if not hasattr(tf.io, \"RaggedFeature\"):\n raise NotImplementedError(\"TF1 does not support parsing ragged tensors.\")\n ragged_tensor_rep = tensor_representation.ragged_tensor\n if (ragged_tensor_rep.row_partition_dtype ==\n schema_pb2.TensorRepresentation.RowPartitionDType.INT32):\n row_splits_dtype = tf.int32\n else:\n row_splits_dtype = tf.int64\n\n partitions = []\n if len(ragged_tensor_rep.feature_path.step) > 1:\n raise ValueError(\n \"Parsing spec from a RaggedTensor with multiple steps in \"\n \"feature_path is not implemented.\")\n if not ragged_tensor_rep.feature_path.step:\n raise ValueError(\"RaggedTensor representation with empty feature_path.\")\n for partition in ragged_tensor_rep.partition:\n if partition.HasField(\"uniform_row_length\"):\n partitions.append(\n tf.io.RaggedFeature.UniformRowLength( # pytype:disable=attribute-error\n partition.uniform_row_length))\n elif partition.HasField(\"row_length\"):\n partitions.append(\n tf.io.RaggedFeature.RowLengths( # pytype:disable=attribute-error\n partition.row_length))\n else:\n raise NotImplementedError(\n \"RaggedTensor partition type not implemented: {}.\".format(\n partition.WhichOneof(\"kind\")))\n return tf.io.RaggedFeature(\n dtype=value_dtype,\n value_key=ragged_tensor_rep.feature_path.step[0],\n partitions=partitions,\n row_splits_dtype=row_splits_dtype)\n else:\n raise NotImplementedError(\n \"TensorRepresentation: {} is not supported.\".format(\n tensor_representation_kind))\n\n\ndef _ShouldIncludeFeature(\n feature: Union[schema_pb2.Feature, schema_pb2.SparseFeature]) -> bool:\n return not (feature.deprecated or\n feature.lifecycle_stage in _DISQUALIFYING_LIFECYCLE_STAGES)\n\n\ndef _InferTensorRepresentationFromSchema(\n schema: schema_pb2.Schema) -> Dict[str, schema_pb2.TensorRepresentation]:\n \"\"\"Translate a Feature proto into a TensorRepresentation proto.\n\n We apply the following rules:\n 1. 
If the feature has a fixed shape (set through Feature.shape field),\n then the feature must always be present (\n Feature.presence.min_fraction == 1.0), and a DenseTensor representation\n will be produced for it.\n 2. Otherwise, a VarLenSparseTensor representation will be produced for it.\n\n Args:\n schema: a schema_pb2.Schema.\n\n Returns:\n A Dict mapping tensor names to their TensorRepresentations.\n\n Raises:\n ValueError: if the feature has a fixed shape but is not always present.\n \"\"\"\n result = {}\n columns_remaining = {f.name: f for f in schema.feature}\n\n sparse_tensor_repsentations, columns_remaining = (\n _InferSparseTensorRepresentationsFromSchema(schema, columns_remaining))\n result.update(sparse_tensor_repsentations)\n\n for feature in columns_remaining.values():\n if not _ShouldIncludeFeature(feature):\n continue\n if feature.HasField(\"shape\"):\n if feature.presence.min_fraction != 1:\n raise ValueError(\n \"Feature {} had shape {} set but min_fraction {} != 1. Use\"\n \" value_count not shape field when min_fraction != 1.\".format(\n feature.name, feature.shape, feature.presence.min_fraction))\n logging.info(\"Feature %s has a shape %s. Setting to DenseTensor.\",\n feature.name, feature.shape)\n result[feature.name] = schema_pb2.TensorRepresentation(\n dense_tensor=schema_pb2.TensorRepresentation.DenseTensor(\n column_name=feature.name, shape=feature.shape))\n else:\n logging.info(\"Feature %s has no shape. Setting to VarLenSparseTensor.\",\n feature.name)\n result[feature.name] = schema_pb2.TensorRepresentation(\n varlen_sparse_tensor=schema_pb2.TensorRepresentation\n .VarLenSparseTensor(column_name=feature.name))\n\n return result\n\n\ndef _InferSparseTensorRepresentationsFromSchema(\n schema: schema_pb2.Schema, columns_remaining: Dict[str, schema_pb2.Feature]\n) -> Tuple[Dict[str, schema_pb2.TensorRepresentation], Dict[\n str, schema_pb2.Feature]]:\n \"\"\"Infers SparseTensor TensorRepresentation from the given schema.\"\"\"\n sparse_tensor_representations = {}\n for sparse_feature in schema.sparse_feature:\n if not _ShouldIncludeFeature(sparse_feature):\n continue\n index_keys = [\n index_feature.name for index_feature in sparse_feature.index_feature\n ]\n index_features = []\n for index_key in index_keys:\n try:\n index_features.append(columns_remaining.pop(index_key))\n except KeyError:\n raise ValueError(\n \"sparse_feature {} referred to index feature {} which did not \"\n \"exist in the schema\".format(sparse_feature.name, index_key))\n\n value_key = sparse_feature.value_feature.name\n try:\n columns_remaining.pop(value_key)\n except KeyError:\n raise ValueError(\n \"sparse_feature {} referred to value feature {} which did not \"\n \"exist in the schema or was referred to as an index or value multiple \"\n \"times.\".format(sparse_feature.name, value_key))\n\n shape = schema_pb2.FixedShape()\n for index_feature, index_key in zip(index_features, index_keys):\n if index_feature.HasField(\"int_domain\"):\n # Currently we only handle O-based INT index features whose minimum\n # domain value must be zero.\n if not index_feature.int_domain.HasField(\"min\"):\n raise ValueError(\"Cannot determine dense shape of sparse feature \"\n \"{}. The minimum domain value of index feature {}\"\n \" is not set.\".format(sparse_feature.name,\n index_key))\n if index_feature.int_domain.min != 0:\n raise ValueError(\"Only 0-based index features are supported. 
Sparse \"\n \"feature {} has index feature {} whose minimum \"\n \"domain value is {}.\".format(\n sparse_feature.name, index_key,\n index_feature.int_domain.min))\n\n if not index_feature.int_domain.HasField(\"max\"):\n raise ValueError(\"Cannot determine dense shape of sparse feature \"\n \"{}. The maximum domain value of index feature {}\"\n \" is not set.\".format(sparse_feature.name,\n index_key))\n shape.dim.add(size=index_feature.int_domain.max + 1)\n else:\n raise ValueError(\"Cannot determine dense shape of sparse feature {}.\"\n \" The index feature {} had no int_domain set.\".format(\n sparse_feature.name, index_key))\n\n sparse_tensor_representations[sparse_feature.name] = (\n schema_pb2.TensorRepresentation(\n sparse_tensor=schema_pb2.TensorRepresentation.SparseTensor(\n dense_shape=shape,\n index_column_names=index_keys,\n value_column_name=value_key)))\n\n return sparse_tensor_representations, columns_remaining\n\n\ndef _ShouldUseLegacyLogic(schema: schema_pb2.Schema) -> bool:\n if _IS_LEGACY_SCHEMA:\n return schema.generate_legacy_feature_spec\n return False\n\n\ndef _LegacyInferTensorRepresentationFromSchema(\n schema: schema_pb2.Schema) -> Dict[str, schema_pb2.TensorRepresentation]:\n \"\"\"Translate a Feature proto into a TensorRepresentation proto.\n\n This function applies heuristics to deduce the shape and other information\n from a FeatureProto. The FeatureProto contains information about the feature\n in an ExampleProto, but the feature spec proto also requires enough\n information to parse the feature into a tensor. We apply the following rules:\n\n 1. The shape and representation of the column are determined by the\n following rules:\n * if the value_count.min and value_count.max are both 1 then the shape\n is scalar and the representation is fixed length.\n * If value_count.min and value_count.max are equal but greater than 1,\n then the shape is a vector whose length is value_count.max and the\n representation is fixed length.\n * If value_count.min and value_count.max are equal and are less than 1,\n then the shape is a vector of unknown length and the representation\n is variable length.\n * If value_count.min and value_count.max are not equal then\n the shape is a vector of unknown length and the representation is\n variable length.\n\n 2. If the feature is always present or is variable length (based on the\n above rule), no default value is set but if the feature is not always\n present and is fixed length, then a canonical default value is chosen\n based on _LEGACY_DEFAULT_VALUE_FOR_FEATURE_TYPE.\n\n 3. Features that are deprecated are completely ignored and removed.\n\n Args:\n schema: A Schema proto.\n\n Returns:\n A Dict mapping tensor names to their TensorRepresentations.\n\n Raises:\n ValueError: If the feature's type is not supported or the schema is invalid.\n \"\"\"\n result = {}\n for feature in schema.feature:\n if not _ShouldIncludeFeature(feature):\n continue\n # Infer canonical tensorflow dtype.\n if feature.value_count.min < 0:\n raise ValueError(\n \"Feature {} has value_count.min < 0 (value was {}).\".format(\n feature.name, feature.value_count.min))\n\n if feature.value_count.max < 0:\n raise ValueError(\n \"Feature {} has value_count.max < 0 (value was {}).\".format(\n feature.name, feature.value_count.max))\n\n # Use heuristics to infer the shape and representation.\n if (feature.value_count.min == feature.value_count.max and\n feature.value_count.min == 1):\n # Case 1: value_count.min == value_count.max == 1. 
Infer a DenseTensor\n # with rank 0 and a default value.\n logging.info(\n \"Feature %s has value_count.min == value_count.max == 1. Setting to \"\n \"DenseTensor.\", feature.name)\n result[feature.name] = schema_pb2.TensorRepresentation(\n dense_tensor=schema_pb2.TensorRepresentation.DenseTensor(\n column_name=feature.name,\n shape=schema_pb2.FixedShape(),\n default_value=_LegacyInferDefaultValue(feature)))\n\n elif (feature.value_count.min == feature.value_count.max and\n feature.value_count.min > 1):\n # Case 2: value_count.min == value_count.max > 1. Infer a DenseTensor\n # with rank 1 and a default value.\n shape = schema_pb2.FixedShape(\n dim=[schema_pb2.FixedShape.Dim(size=feature.value_count.min)])\n logging.info(\n \"Feature %s has value_count.min == value_count.max > 1. Setting to \"\n \"DenseTensor.\", feature.name)\n result[feature.name] = schema_pb2.TensorRepresentation(\n dense_tensor=schema_pb2.TensorRepresentation.DenseTensor(\n column_name=feature.name,\n shape=shape,\n default_value=_LegacyInferDefaultValue(feature)))\n\n else:\n # Case 3: Either value_count.min != value_count.max or\n # value_count.min == value_count.max == 0. Infer a VarLenSparseTensor.\n logging.info(\n \"Feature %s has value_count.min != value_count.max or \"\n \"value_count.min == value_count.max == 0. \"\n \"Setting to VarLenSparseTensor.\", feature.name)\n result[feature.name] = schema_pb2.TensorRepresentation(\n varlen_sparse_tensor=schema_pb2.TensorRepresentation\n .VarLenSparseTensor(column_name=feature.name))\n\n return result\n\n\ndef _LegacyInferDefaultValue(\n feature_proto: schema_pb2.Feature\n) -> Optional[schema_pb2.TensorRepresentation.DefaultValue]:\n \"\"\"Inferrs a default value for a feature.\"\"\"\n if feature_proto.presence.min_fraction < 1:\n default_value = _LEGACY_DEFAULT_VALUE_FOR_FEATURE_TYPE.get(\n feature_proto.type)\n if default_value is None:\n raise ValueError(\"Unable to infer a default value for feature {}\".format(\n feature_proto))\n return default_value\n else:\n logging.info(\n \"Feature %s has min_fraction = 1 (%s). 
Not setting defalut value.\",\n feature_proto.name, feature_proto.presence)\n return None\n\n\ndef _GetDimsFromFixedShape(shape: schema_pb2.FixedShape) -> List[int]:\n \"\"\"Returns a list of dimensions, given a schema_pb2.FixedShape.\n\n Args:\n shape: A schema_pb2.FixedShape.\n \"\"\"\n return [dim.size for dim in shape.dim]\n\n\ndef _GetDefaultValuesList(\n unbatched_shape: List[int], feature_type: schema_pb2.FeatureType,\n default_value_proto: schema_pb2.TensorRepresentation.DefaultValue\n) -> List[Union[int, float, bytes]]:\n \"\"\"Returns a List filled with the default value given in the proto.\n\n Args:\n unbatched_shape: The shape of the tensor to fill.\n feature_type: The expected type of the default_value.\n default_value_proto: The DefaultValue proto that holds the default_value.\n\n Raises:\n ValueError: if the default_value is incompatible with feature_type.\n \"\"\"\n kind = default_value_proto.WhichOneof(\"kind\")\n default_value = getattr(default_value_proto, kind)\n expected_feature_type = _DEFAULT_VALUE_KIND_TO_FEATURE_TYPE.get(kind, None)\n if feature_type != expected_feature_type:\n raise ValueError(\n \"FeatureType: {} is incompatible with default_value: {}\".format(\n schema_pb2.FeatureType.Name(feature_type), default_value))\n size = int(np.prod(unbatched_shape, initial=1))\n\n return [default_value] * size\n\n\ndef ProjectTensorRepresentationsInSchema(\n schema: schema_pb2.Schema,\n tensor_names: Iterable[str]) -> schema_pb2.Schema:\n \"\"\"Returns a projection of schema by the given tensor names.\n\n Tries to extract TensorRpresentations from the schema and infers them in case\n there's none. The schema is then projected to have the TensorRepresentations\n and source feature columns of tensors that are present in `tensor_names`.\n\n Args:\n schema: A TFMD Schema to be projected.\n tensor_names: Names of tensors that schema must be projected on.\n\n Returns:\n A schema that contains a subset of TensorRepresentations and features in\n `schema` that is a set of source columns for the given tensors.\n\n Raises:\n ValueError: if `schema` doesn't contain any of the given `tensor_names` or\n TensorRepresentations' source columns are not present in `schema` features.\n \"\"\"\n tensor_representations = GetTensorRepresentationsFromSchema(schema)\n if tensor_representations is None:\n tensor_representations = InferTensorRepresentationsFromSchema(schema)\n tensor_names = set(tensor_names)\n if not tensor_names.issubset(tensor_representations):\n raise ValueError(\n \"Unable to project {} because they were not in the original \"\n \"or inferred TensorRepresentations.\".format(\n tensor_names - tensor_representations.keys()))\n paths = set()\n for tensor_name in tensor_names:\n paths.update(\n GetSourceColumnsFromTensorRepresentation(\n tensor_representations[tensor_name]))\n result = schema_pb2.Schema()\n\n for feature in schema.feature:\n feature_path = path.ColumnPath(feature.name)\n if feature_path in paths:\n paths.remove(feature_path)\n result.feature.add().CopyFrom(feature)\n\n if paths:\n raise ValueError(\"TensorRepresentations source columns {} are not present \"\n \"in the schema.\".format(paths))\n\n SetTensorRepresentationsInSchema(\n result,\n {k: v for k, v in tensor_representations.items() if k in tensor_names})\n\n return result\n\n\ndef _GetSourceColumnsFromFeature(\n feature: schema_pb2.Feature) -> List[path.ColumnPath]:\n \"\"\"Extracts all Feature paths from a potentially nested Feature.\"\"\"\n if feature.type == schema_pb2.FeatureType.STRUCT:\n result = 
[]\n for child_feature in feature.struct_domain.feature:\n result.extend(\n path.ColumnPath([feature.name] + list(child_path.steps()))\n for child_path in _GetSourceColumnsFromFeature(child_feature))\n return result\n else:\n return [path.ColumnPath(feature.name)]\n\n\ndef ValidateTensorRepresentationsInSchema(\n schema: schema_pb2.Schema,\n tensor_representation_group_name: str = _DEFAULT_TENSOR_REPRESENTATION_GROUP\n):\n \"\"\"Checks that TensorRepresentations refer all schema features at least once.\n\n Args:\n schema: A TFMD Schema proto.\n tensor_representation_group_name: (optional) the name of the group to look\n for. If not provided, looks for the default name.\n\n Raises:\n ValueError: If either of the following is true\n * there's no TensorRepresentationGroup with the given name;\n * TensorRepresentations refer to a feature that is not in the schema;\n * feature exists in the schema, but is not referred to by any\n TensorRepresentation.\n \"\"\"\n tensor_representations = GetTensorRepresentationsFromSchema(\n schema, tensor_representation_group_name)\n if tensor_representations is None:\n raise ValueError(\n \"TensorRepresentations are not found in the schema. Did you specify \"\n \"correct group name?\")\n source_features = set()\n for representation in tensor_representations.values():\n source_features.update(\n GetSourceColumnsFromTensorRepresentation(representation))\n all_features = set()\n for feature in schema.feature:\n all_features.update(_GetSourceColumnsFromFeature(feature))\n source_not_in_schema = source_features - all_features\n if source_not_in_schema:\n raise ValueError(\n f\"Features referred in TensorRepresentations but not found in the \"\n f\"schema: {source_not_in_schema}\")\n in_schema_not_source = all_features - source_features\n if in_schema_not_source:\n raise ValueError(f\"Features present in the schema but not referred in any \"\n f\"TensorRepresentation: {in_schema_not_source}\")\n" ]
[ [ "tensorflow.io.RaggedFeature.UniformRowLength", "tensorflow.io.FixedLenFeature", "numpy.prod", "tensorflow.io.VarLenFeature", "tensorflow.io.RaggedFeature", "tensorflow.io.RaggedFeature.RowLengths" ] ]
cclauss/incubator-mxnet
[ "5df5467adeb6ede4b3c8d4d027e6a46f8212b5fb" ]
[ "python/mxnet/symbol/symbol.py" ]
[ "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\n# coding: utf-8\n# pylint: disable=invalid-name, protected-access, too-many-arguments, too-many-lines\n# pylint: disable=import-error, no-name-in-module, too-many-locals\n\"\"\"Symbolic configuration API of MXNet.\"\"\"\nfrom __future__ import absolute_import as _abs\ntry:\n from __builtin__ import slice as py_slice\nexcept ImportError:\n from builtins import slice as py_slice\n\nfrom array import array\nimport ctypes\nimport warnings\nfrom numbers import Number\n\nimport numpy as _numpy\n\nfrom ..attribute import AttrScope\nfrom ..base import _LIB, numeric_types, c_array, c_array_buf, c_str, c_str_array, c_handle_array\nfrom ..base import mx_uint, py_str, string_types, integer_types\nfrom ..base import NDArrayHandle, ExecutorHandle, SymbolHandle\nfrom ..base import check_call, MXNetError, NotImplementedForSymbol\nfrom ..context import Context, current_context\nfrom ..ndarray import NDArray, _DTYPE_NP_TO_MX, _DTYPE_MX_TO_NP, _GRAD_REQ_MAP\nfrom ..ndarray.ndarray import _STORAGE_TYPE_STR_TO_ID\nfrom ..ndarray import _ndarray_cls\nfrom ..executor import Executor\nfrom . import _internal\nfrom . import op\nfrom ._internal import SymbolBase, _set_symbol_class\n\n__all__ = [\"Symbol\", \"var\", \"Variable\", \"Group\", \"load\", \"load_json\",\n \"pow\", \"maximum\", \"minimum\", \"hypot\", \"eye\", \"zeros\", \"ones\", \"full\", \"arange\",\n \"histogram\"]\n\n\nclass Symbol(SymbolBase):\n \"\"\"Symbol is symbolic graph of the mxnet.\"\"\"\n # disable dictionary storage, also do not have parent type.\n # pylint: disable=no-member\n __slots__ = []\n\n # Make numpy functions return Symbol instead of numpy object array\n __array_priority__ = 1000.0\n\n def __repr__(self):\n \"\"\"Gets a string representation of the symbol.\"\"\"\n name = self.name\n if name is None:\n name = ', '.join([i.name for i in self])\n return '<%s group [%s]>' % (self.__class__.__name__, name)\n else:\n return '<%s %s>' % (self.__class__.__name__, name)\n\n def __iter__(self):\n \"\"\"Returns a generator object of symbol.\n\n One can loop through the returned object list to get outputs.\n\n Example\n -------\n >>> a = mx.sym.Variable('a')\n >>> b = mx.sym.Variable('b')\n >>> c = a+b\n >>> d = mx.sym.Variable('d')\n >>> e = d+c\n >>> out = e.get_children()\n >>> out\n <Symbol Grouped>\n >>> for i in out:\n ... i\n ...\n <Symbol d>\n <Symbol _plus0>\n \"\"\"\n return (self[i] for i in self.list_outputs())\n\n def __add__(self, other):\n \"\"\"x.__add__(y) <=> x+y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_add` instead. 
\"\"\"\n if isinstance(other, Symbol):\n return _internal._Plus(self, other)\n if isinstance(other, Number):\n return _internal._PlusScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __bool__(self):\n raise NotImplementedForSymbol(self.__bool__, 'bool')\n\n __nonzero__ = __bool__\n\n def __iadd__(self, other):\n raise NotImplementedForSymbol(self.__iadd__, '+=', other, 1)\n\n def __radd__(self, other):\n return self.__add__(other)\n\n def __sub__(self, other):\n \"\"\"x.__sub__(y) <=> x-y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_sub` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._Minus(self, other)\n if isinstance(other, Number):\n return _internal._MinusScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __isub__(self, other):\n raise NotImplementedForSymbol(self.__isub__, '-=', other)\n\n def __rsub__(self, other):\n \"\"\"x.__rsub__(y) <=> y-x\n\n Only `NDArray` is supported for now.\n\n Example\n -------\n >>> x = mx.nd.ones((2,3))*3\n >>> y = mx.nd.ones((2,3))\n >>> x.__rsub__(y).asnumpy()\n array([[-2., -2., -2.],\n [-2., -2., -2.]], dtype=float32)\n \"\"\"\n if isinstance(other, Number):\n return _internal._RMinusScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __mul__(self, other):\n \"\"\"x.__mul__(y) <=> x*y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_mul` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._Mul(self, other)\n if isinstance(other, Number):\n return _internal._MulScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __imul__(self, other):\n raise NotImplementedForSymbol(self.__imul__, '*=', other)\n\n def __rmul__(self, other):\n return self.__mul__(other)\n\n def __div__(self, other):\n \"\"\"x.__div__(y) <=> x/y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_div` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._Div(self, other)\n if isinstance(other, Number):\n return _internal._DivScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __rdiv__(self, other):\n \"\"\"x.__rdiv__(y) <=> y/x\n\n Only `NDArray` is supported for now.\n\n Example\n -------\n >>> x = mx.nd.ones((2,3))*3\n >>> y = mx.nd.ones((2,3))\n >>> x.__rdiv__(y).asnumpy()\n array([[ 0.33333334, 0.33333334, 0.33333334],\n [ 0.33333334, 0.33333334, 0.33333334]], dtype=float32)\n \"\"\"\n if isinstance(other, Number):\n return _internal._RDivScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __mod__(self, other):\n \"\"\"x.__mod__(y) <=> x%y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_mod` instead. 
\"\"\"\n if isinstance(other, Symbol):\n return _internal._Mod(self, other)\n if isinstance(other, Number):\n return _internal._ModScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __rmod__(self, other):\n \"\"\"x.__rmod__(y) <=> y%x\n\n Only `NDArray` is supported for now.\n\n Example\n -------\n >>> x = mx.nd.ones((2,3))*3\n >>> y = mx.nd.ones((2,3))\n >>> x.__rmod__(y).asnumpy()\n array([[ 1., 1., 1.,\n [ 1., 1., 1., dtype=float32)\n \"\"\"\n if isinstance(other, Number):\n return _internal._RModScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __idiv__(self, other):\n raise NotImplementedForSymbol(self.__idiv__, '/=', other)\n\n def __truediv__(self, other):\n return self.__div__(other)\n\n def __rtruediv__(self, other):\n return self.__rdiv__(other)\n\n def __itruediv__(self, other):\n raise NotImplementedForSymbol(self.__itruediv__, '/=', other)\n\n def __pow__(self, other):\n \"\"\"x.__pow__(y) <=> x**y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_pow` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._Power(self, other)\n if isinstance(other, Number):\n return _internal._PowerScalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __rpow__(self, other):\n raise NotImplementedForSymbol(self.__rpow__, 'y**x', other)\n\n def __neg__(self):\n \"\"\"x.__neg__() <=> -x\n\n Numerical negative, element-wise.\n\n Example\n -------\n >>> a = mx.sym.Variable('a')\n >>> a\n <Symbol a>\n >>> -a\n <Symbol _mulscalar0>\n >>> a_neg = a.__neg__()\n >>> c = a_neg*b\n >>> ex = c.eval(ctx=mx.cpu(), a=mx.nd.ones([2,3]), b=mx.nd.ones([2,3]))\n >>> ex[0].asnumpy()\n array([[-1., -1., -1.],\n [-1., -1., -1.]], dtype=float32)\n \"\"\"\n return self.__mul__(-1.0)\n\n def __copy__(self):\n return self.__deepcopy__(None)\n\n def __deepcopy__(self, _):\n \"\"\"Returns a deep copy of the input object.\n\n This function returns a deep copy of the input object including the current state\n of all its parameters such as weights, biases, etc.\n\n Any changes made to the deep copy do not reflect in the original object.\n\n Example\n -------\n >>> import copy\n >>> data = mx.sym.Variable('data')\n >>> data_1 = copy.deepcopy(data)\n >>> data_1 = 2*data\n >>> data_1.tojson()\n >>> data_1 is data # Data got modified\n False\n \"\"\"\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolCopy(self.handle,\n ctypes.byref(handle)))\n return Symbol(handle)\n\n def __eq__(self, other):\n \"\"\"x.__eq__(y) <=> x==y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_equal` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._equal(self, other)\n if isinstance(other, numeric_types):\n return _internal._equal_scalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __ne__(self, other):\n \"\"\"x.__ne__(y) <=> x!=y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_not_equal` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._not_equal(self, other)\n if isinstance(other, numeric_types):\n return _internal._not_equal_scalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __gt__(self, other):\n \"\"\"x.__gt__(y) <=> x>y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_greater` instead. 
\"\"\"\n if isinstance(other, Symbol):\n return _internal._greater(self, other)\n if isinstance(other, numeric_types):\n return _internal._greater_scalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __ge__(self, other):\n \"\"\"x.__ge__(y) <=> x>=y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_greater_equal` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._greater_equal(self, other)\n if isinstance(other, numeric_types):\n return _internal._greater_equal_scalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __lt__(self, other):\n \"\"\"x.__lt__(y) <=> x<y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_lesser` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._lesser(self, other)\n if isinstance(other, numeric_types):\n return _internal._lesser_scalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __le__(self, other):\n \"\"\"x.__le__(y) <=> x<=y\n\n Scalar input is supported.\n Broadcasting is not supported. Use `broadcast_lesser_equal` instead. \"\"\"\n if isinstance(other, Symbol):\n return _internal._lesser_equal(self, other)\n if isinstance(other, numeric_types):\n return _internal._lesser_equal_scalar(self, scalar=other)\n else:\n raise TypeError('type %s not supported' % str(type(other)))\n\n def __getstate__(self):\n handle = self.handle\n if handle is not None:\n return {'handle': self.tojson()}\n else:\n return {'handle': None}\n\n def __setstate__(self, state):\n # pylint: disable=assigning-non-slot\n handle = state['handle']\n if handle is not None:\n json_str = handle\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolCreateFromJSON(c_str(json_str), ctypes.byref(handle)))\n self.handle = handle\n else:\n self.handle = None\n\n def __call__(self, *args, **kwargs):\n \"\"\"Composes symbol using inputs.\n\n x.__call__(y, z) <=> x(y,z)\n\n This function internally calls `_compose` to compose the symbol and\n returns the composed symbol.\n\n Example\n -------\n >>> data = mx.symbol.Variable('data')\n >>> net1 = mx.symbol.FullyConnected(data=data, name='fc1', num_hidden=10)\n >>> net2 = mx.symbol.FullyConnected(name='fc3', num_hidden=10)\n >>> composed = net2(fc3_data=net1, name='composed')\n >>> composed\n <Symbol composed>\n >>> called = net2.__call__(fc3_data=net1, name='composed')\n >>> called\n <Symbol composed>\n\n Parameters\n ----------\n args:\n Positional arguments.\n\n kwargs:\n Keyword arguments.\n\n Returns\n -------\n The resulting symbol.\n \"\"\"\n s = self.__copy__()\n s._compose(*args, **kwargs)\n return s\n\n def _compose(self, *args, **kwargs):\n \"\"\"Composes symbol using inputs.\n\n x._compose(y, z) <=> x(y,z)\n\n This function mutates the current symbol.\n\n Example\n -------\n >>> data = mx.symbol.Variable('data')\n >>> net1 = mx.symbol.FullyConnected(data=data, name='fc1', num_hidden=10)\n >>> net2 = mx.symbol.FullyConnected(name='fc3', num_hidden=10)\n >>> net2\n <Symbol fc3>\n >>> net2._compose(fc3_data=net1, name='composed')\n >>> net2\n <Symbol composed>\n\n Parameters\n ----------\n args:\n Positional arguments.\n\n kwargs:\n Keyword arguments.\n\n Returns\n -------\n The resulting symbol.\n \"\"\"\n name = kwargs.pop('name', None)\n\n if name:\n name = c_str(name)\n if len(args) != 0 and len(kwargs) != 0:\n raise TypeError('compose only accept input Symbols \\\n either as positional or keyword 
arguments, not both')\n\n for arg in args:\n if not isinstance(arg, Symbol):\n raise TypeError('Compose expect `Symbol` as arguments')\n for val in kwargs.values():\n if not isinstance(val, Symbol):\n raise TypeError('Compose expect `Symbol` as arguments')\n\n num_args = len(args) + len(kwargs)\n if len(kwargs) != 0:\n keys = c_str_array(kwargs.keys())\n args = c_handle_array(kwargs.values())\n else:\n keys = None\n args = c_handle_array(args)\n check_call(_LIB.MXSymbolCompose(\n self.handle, name, num_args, keys, args))\n\n def __getitem__(self, index):\n \"\"\"x.__getitem__(i) <=> x[i]\n\n Returns a sliced view of the input symbol.\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> a.__getitem__(0)\n <Symbol a>\n >>> a[0]\n <Symbol a>\n\n Parameters\n ----------\n index : int or str\n Indexing key\n\n \"\"\"\n output_count = len(self)\n if isinstance(index, py_slice):\n start = 0 if index.start is None else index.start\n stop = output_count if index.stop is None else index.stop\n step = 1 if index.step is None else index.step\n return Group([self[i] for i in range(start, stop, step)])\n\n if isinstance(index, string_types):\n # Returning this list of names is expensive. Some symbols may have hundreds of outputs\n output_names = self.list_outputs()\n idx = None\n for i, name in enumerate(output_names):\n if name == index:\n if idx is not None:\n raise ValueError('There are multiple outputs with name \\\"%s\\\"' % index)\n idx = i\n if idx is None:\n raise ValueError('Cannot find output that matches name \\\"%s\\\"' % index)\n index = idx\n\n if not isinstance(index, int):\n raise TypeError('Symbol only support integer index to fetch i-th output')\n if index >= output_count:\n # Important, python determines the end by this exception\n raise IndexError\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolGetOutput(\n self.handle, mx_uint(index), ctypes.byref(handle)))\n return Symbol(handle=handle)\n\n @property\n def name(self):\n \"\"\"Gets name string from the symbol, this function only works for non-grouped symbol.\n\n Returns\n -------\n value : str\n The name of this symbol, returns ``None`` for grouped symbol.\n \"\"\"\n ret = ctypes.c_char_p()\n success = ctypes.c_int()\n check_call(_LIB.MXSymbolGetName(\n self.handle, ctypes.byref(ret), ctypes.byref(success)))\n if success.value != 0:\n return py_str(ret.value)\n else:\n return None\n\n def attr(self, key):\n \"\"\"Returns the attribute string for corresponding input key from the symbol.\n\n This function only works for non-grouped symbols.\n\n Example\n -------\n >>> data = mx.sym.Variable('data', attr={'mood': 'angry'})\n >>> data.attr('mood')\n 'angry'\n\n Parameters\n ----------\n key : str\n The key corresponding to the desired attribute.\n\n Returns\n -------\n value : str\n The desired attribute value, returns ``None`` if the attribute does not exist.\n \"\"\"\n ret = ctypes.c_char_p()\n success = ctypes.c_int()\n check_call(_LIB.MXSymbolGetAttr(\n self.handle, c_str(key), ctypes.byref(ret), ctypes.byref(success)))\n if success.value != 0:\n return py_str(ret.value)\n else:\n return None\n\n def list_attr(self, recursive=False):\n \"\"\"Gets all attributes from the symbol.\n\n Example\n -------\n >>> data = mx.sym.Variable('data', attr={'mood': 'angry'})\n >>> data.list_attr()\n {'mood': 'angry'}\n\n Returns\n -------\n ret : Dict of str to str\n A dictionary mapping attribute keys to values.\n \"\"\"\n if recursive:\n raise DeprecationWarning(\"Symbol.list_attr with recursive=True has been deprecated. 
\"\n \"Please use attr_dict instead.\")\n size = mx_uint()\n pairs = ctypes.POINTER(ctypes.c_char_p)()\n f_handle = _LIB.MXSymbolListAttrShallow\n check_call(f_handle(self.handle, ctypes.byref(size), ctypes.byref(pairs)))\n return {py_str(pairs[i * 2]): py_str(pairs[i * 2 + 1]) for i in range(size.value)}\n\n def attr_dict(self):\n \"\"\"Recursively gets all attributes from the symbol and its children.\n\n Example\n -------\n >>> a = mx.sym.Variable('a', attr={'a1':'a2'})\n >>> b = mx.sym.Variable('b', attr={'b1':'b2'})\n >>> c = a+b\n >>> c.attr_dict()\n {'a': {'a1': 'a2'}, 'b': {'b1': 'b2'}}\n\n Returns\n -------\n ret : Dict of str to dict\n There is a key in the returned dict for every child with non-empty attribute set.\n For each symbol, the name of the symbol is its key in the dict\n and the correspond value is that symbol's attribute list (itself a dictionary).\n \"\"\"\n size = mx_uint()\n pairs = ctypes.POINTER(ctypes.c_char_p)()\n f_handle = _LIB.MXSymbolListAttr\n check_call(f_handle(self.handle, ctypes.byref(size), ctypes.byref(pairs)))\n ret = {}\n for i in range(size.value):\n name, key = py_str(pairs[i * 2]).split('$')\n val = py_str(pairs[i * 2 + 1])\n if name not in ret:\n ret[name] = {}\n ret[name][key] = val\n return ret\n\n def _set_attr(self, **kwargs):\n \"\"\"Sets an attribute of the symbol.\n\n For example. A._set_attr(foo=\"bar\") adds the mapping ``\"{foo: bar}\"``\n to the symbol's attribute dictionary.\n\n Parameters\n ----------\n **kwargs\n The attributes to set\n \"\"\"\n for key, value in kwargs.items():\n if not isinstance(value, string_types):\n raise ValueError(\"Set Attr only accepts string values\")\n check_call(_LIB.MXSymbolSetAttr(\n self.handle, c_str(key), c_str(str(value))))\n\n def get_internals(self):\n \"\"\"Gets a new grouped symbol `sgroup`. The output of `sgroup` is a list of\n outputs of all of the internal nodes.\n\n Consider the following code:\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> b = mx.sym.var('b')\n >>> c = a + b\n >>> d = c.get_internals()\n >>> d\n <Symbol Grouped>\n >>> d.list_outputs()\n ['a', 'b', '_plus4_output']\n\n Returns\n -------\n sgroup : Symbol\n A symbol group containing all internal and leaf nodes of the computation graph\n used to compute the symbol.\n \"\"\"\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolGetInternals(\n self.handle, ctypes.byref(handle)))\n return Symbol(handle=handle)\n\n def get_children(self):\n \"\"\"Gets a new grouped symbol whose output contains\n inputs to output nodes of the original symbol.\n\n Example\n -------\n >>> x = mx.sym.Variable('x')\n >>> y = mx.sym.Variable('y')\n >>> z = mx.sym.Variable('z')\n >>> a = y+z\n >>> b = x+a\n >>> b.get_children()\n <Symbol Grouped>\n >>> b.get_children().list_outputs()\n ['x', '_plus10_output']\n >>> b.get_children().get_children().list_outputs()\n ['y', 'z']\n\n Returns\n -------\n sgroup : Symbol or None\n The children of the head node. 
If the symbol has no\n inputs then ``None`` will be returned.\n \"\"\"\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolGetChildren(\n self.handle, ctypes.byref(handle)))\n ret = Symbol(handle=handle)\n if len(ret.list_outputs()) == 0:\n return None\n return ret\n\n def list_arguments(self):\n \"\"\"Lists all the arguments in the symbol.\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> b = mx.sym.var('b')\n >>> c = a + b\n >>> c.list_arguments()\n ['a', 'b']\n\n Returns\n -------\n args : list of string\n List containing the names of all the arguments required to compute the symbol.\n \"\"\"\n size = ctypes.c_uint()\n sarr = ctypes.POINTER(ctypes.c_char_p)()\n check_call(_LIB.MXSymbolListArguments(\n self.handle, ctypes.byref(size), ctypes.byref(sarr)))\n return [py_str(sarr[i]) for i in range(size.value)]\n\n def list_outputs(self):\n \"\"\"Lists all the outputs in the symbol.\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> b = mx.sym.var('b')\n >>> c = a + b\n >>> c.list_outputs()\n ['_plus12_output']\n\n Returns\n -------\n list of str\n List of all the outputs.\n For most symbols, this list contains only the name of this symbol.\n For symbol groups, this is a list with the names of all symbols\n in the group.\n \"\"\"\n size = ctypes.c_uint()\n sarr = ctypes.POINTER(ctypes.c_char_p)()\n check_call(_LIB.MXSymbolListOutputs(\n self.handle, ctypes.byref(size), ctypes.byref(sarr)))\n return [py_str(sarr[i]) for i in range(size.value)]\n\n # pylint: disable=invalid-length-returned\n def __len__(self):\n \"\"\"Gets the number of outputs of the symbol.\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> b = mx.sym.var('b')\n >>> c = a + b\n >>> len(c)\n 1\n\n Returns\n -------\n int\n Number of outputs of the symbol.\n \"\"\"\n output_count = mx_uint()\n check_call(_LIB.MXSymbolGetNumOutputs(self.handle, ctypes.byref(output_count)))\n return output_count.value\n\n def list_auxiliary_states(self):\n \"\"\"Lists all the auxiliary states in the symbol.\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> b = mx.sym.var('b')\n >>> c = a + b\n >>> c.list_auxiliary_states()\n []\n\n Example of auxiliary states in `BatchNorm`.\n\n >>> data = mx.symbol.Variable('data')\n >>> weight = mx.sym.Variable(name='fc1_weight')\n >>> fc1 = mx.symbol.FullyConnected(data = data, weight=weight, name='fc1', num_hidden=128)\n >>> fc2 = mx.symbol.BatchNorm(fc1, name='batchnorm0')\n >>> fc2.list_auxiliary_states()\n ['batchnorm0_moving_mean', 'batchnorm0_moving_var']\n\n Returns\n -------\n aux_states : list of str\n List of the auxiliary states in the input symbol.\n\n Notes\n -----\n Auxiliary states are special states of symbols that do not correspond to an argument,\n and are not updated by gradient descent. 
Common examples of auxiliary states\n include the `moving_mean` and `moving_variance` in `BatchNorm`.\n Most operators do not have auxiliary states.\n \"\"\"\n size = ctypes.c_uint()\n sarr = ctypes.POINTER(ctypes.c_char_p)()\n check_call(_LIB.MXSymbolListAuxiliaryStates(\n self.handle, ctypes.byref(size), ctypes.byref(sarr)))\n return [py_str(sarr[i]) for i in range(size.value)]\n\n def list_inputs(self):\n \"\"\"Lists all arguments and auxiliary states of this Symbol.\n\n Returns\n -------\n inputs : list of str\n List of all inputs.\n\n Examples\n --------\n >>> bn = mx.sym.BatchNorm(name='bn')\n >>> bn.list_arguments()\n ['bn_data', 'bn_gamma', 'bn_beta']\n >>> bn.list_auxiliary_states()\n ['bn_moving_mean', 'bn_moving_var']\n >>> bn.list_inputs()\n ['bn_data', 'bn_gamma', 'bn_beta', 'bn_moving_mean', 'bn_moving_var']\n \"\"\"\n size = ctypes.c_uint()\n sarr = ctypes.POINTER(ctypes.c_char_p)()\n check_call(_LIB.NNSymbolListInputNames(\n self.handle, 0, ctypes.byref(size), ctypes.byref(sarr)))\n return [py_str(sarr[i]) for i in range(size.value)]\n\n def infer_type(self, *args, **kwargs):\n \"\"\"Infers the type of all arguments and all outputs, given the known types\n for some arguments.\n\n This function takes the known types of some arguments in either positional way\n or keyword argument way as input. It returns a tuple of `None` values\n if there is not enough information to deduce the missing types.\n\n Inconsistencies in the known types will cause an error to be raised.\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> b = mx.sym.var('b')\n >>> c = a + b\n >>> arg_types, out_types, aux_types = c.infer_type(a='float32')\n >>> arg_types\n [<type 'numpy.float32'>, <type 'numpy.float32'>]\n >>> out_types\n [<type 'numpy.float32'>]\n >>> aux_types\n []\n\n Parameters\n ----------\n *args :\n Type of known arguments in a positional way.\n Unknown type can be marked as None.\n\n **kwargs :\n Keyword arguments of known types.\n\n Returns\n -------\n arg_types : list of numpy.dtype or None\n List of argument types.\n The order is same as the order of list_arguments().\n out_types : list of numpy.dtype or None\n List of output types.\n The order is same as the order of list_outputs().\n aux_types : list of numpy.dtype or None\n List of auxiliary state types.\n The order is same as the order of list_auxiliary_states().\n \"\"\"\n if len(args) != 0 and len(kwargs) != 0:\n raise ValueError('Can only specify known argument \\\n types either by positional or kwargs way.')\n sdata = []\n if len(args) != 0:\n keys = c_array(ctypes.c_char_p, [])\n for s in args:\n if s is not None:\n s = _numpy.dtype(s).type\n if s not in _DTYPE_NP_TO_MX:\n raise TypeError('Argument need to be one of ' + str(_DTYPE_NP_TO_MX))\n sdata.append(_DTYPE_NP_TO_MX[s])\n else:\n sdata.append(-1)\n else:\n str_keys = []\n for k, v in kwargs.items():\n v = _numpy.dtype(v).type\n if v in _DTYPE_NP_TO_MX:\n str_keys.append(k)\n sdata.append(_DTYPE_NP_TO_MX[v])\n keys = c_str_array(str_keys)\n arg_type_size = mx_uint()\n arg_type_data = ctypes.POINTER(ctypes.c_int)()\n out_type_size = mx_uint()\n out_type_data = ctypes.POINTER(ctypes.c_int)()\n aux_type_size = mx_uint()\n aux_type_data = ctypes.POINTER(ctypes.c_int)()\n complete = ctypes.c_int()\n check_call(_LIB.MXSymbolInferType(\n self.handle,\n mx_uint(len(sdata)),\n keys,\n c_array_buf(ctypes.c_int, array('i', sdata)),\n ctypes.byref(arg_type_size),\n ctypes.byref(arg_type_data),\n ctypes.byref(out_type_size),\n ctypes.byref(out_type_data),\n 
ctypes.byref(aux_type_size),\n ctypes.byref(aux_type_data),\n ctypes.byref(complete)))\n if complete.value != 0:\n arg_types = [\n _DTYPE_MX_TO_NP[arg_type_data[i]] for i in range(arg_type_size.value)]\n out_types = [\n _DTYPE_MX_TO_NP[out_type_data[i]] for i in range(out_type_size.value)]\n aux_types = [\n _DTYPE_MX_TO_NP[aux_type_data[i]] for i in range(aux_type_size.value)]\n return (arg_types, out_types, aux_types)\n else:\n return (None, None, None)\n\n def infer_shape(self, *args, **kwargs):\n \"\"\"Infers the shapes of all arguments and all outputs given the known shapes of\n some arguments.\n\n This function takes the known shapes of some arguments in either positional way\n or keyword argument way as input. It returns a tuple of `None` values\n if there is not enough information to deduce the missing shapes.\n\n Example\n -------\n >>> a = mx.sym.var('a')\n >>> b = mx.sym.var('b')\n >>> c = a + b\n >>> arg_shapes, out_shapes, aux_shapes = c.infer_shape(a=(3,3))\n >>> arg_shapes\n [(3L, 3L), (3L, 3L)]\n >>> out_shapes\n [(3L, 3L)]\n >>> aux_shapes\n []\n >>> c.infer_shape(a=(0,3)) # 0s in shape means unknown dimensions. So, returns None.\n (None, None, None)\n\n Inconsistencies in the known shapes will cause an error to be raised.\n See the following example:\n\n >>> data = mx.sym.Variable('data')\n >>> out = mx.sym.FullyConnected(data=data, name='fc1', num_hidden=1000)\n >>> out = mx.sym.Activation(data=out, act_type='relu')\n >>> out = mx.sym.FullyConnected(data=out, name='fc2', num_hidden=10)\n >>> weight_shape= (1, 100)\n >>> data_shape = (100, 100)\n >>> out.infer_shape(data=data_shape, fc1_weight=weight_shape)\n Error in operator fc1: Shape inconsistent, Provided=(1,100), inferred shape=(1000,100)\n\n Parameters\n ----------\n *args :\n Shape of arguments in a positional way.\n Unknown shape can be marked as None.\n\n **kwargs :\n Keyword arguments of the known shapes.\n\n Returns\n -------\n arg_shapes : list of tuple or None\n List of argument shapes.\n The order is same as the order of list_arguments().\n out_shapes : list of tuple or None\n List of output shapes.\n The order is same as the order of list_outputs().\n aux_shapes : list of tuple or None\n List of auxiliary state shapes.\n The order is same as the order of list_auxiliary_states().\n \"\"\"\n try:\n res = self._infer_shape_impl(False, *args, **kwargs)\n if res[1] is None:\n arg_shapes, _, _ = self._infer_shape_impl(True, *args, **kwargs)\n arg_names = self.list_arguments()\n unknowns = []\n for name, shape in zip(arg_names, arg_shapes):\n if not shape or not _numpy.prod(shape):\n if len(unknowns) >= 10:\n unknowns.append('...')\n break\n unknowns.append('%s: %s' % (name, str(shape)))\n warnings.warn(\n \"Cannot decide shape for the following arguments \" +\n \"(0s in shape means unknown dimensions). \" +\n \"Consider providing them as input:\\n\\t\" +\n \"\\n\\t\".join(unknowns), stacklevel=2)\n return res\n except MXNetError:\n print(\"infer_shape error. Arguments:\")\n for i, arg in enumerate(args):\n print(\" #%d: %s\" % (i, arg))\n for k, v in kwargs.items():\n print(\" %s: %s\" % (k, v))\n raise\n\n def infer_shape_partial(self, *args, **kwargs):\n \"\"\"Infers the shape partially.\n\n This functions works the same way as `infer_shape`,\n except that this function can return partial results.\n\n In the following example, information about fc2 is not available. 
So, `infer_shape`\n will return a tuple of `None` values but `infer_shape_partial` will return partial values.\n\n Example\n -------\n >>> data = mx.sym.Variable('data')\n >>> prev = mx.sym.Variable('prev')\n >>> fc1 = mx.sym.FullyConnected(data=data, name='fc1', num_hidden=128)\n >>> fc2 = mx.sym.FullyConnected(data=prev, name='fc2', num_hidden=128)\n >>> out = mx.sym.Activation(data=mx.sym.elemwise_add(fc1, fc2), act_type='relu')\n >>> out.list_arguments()\n ['data', 'fc1_weight', 'fc1_bias', 'prev', 'fc2_weight', 'fc2_bias']\n >>> out.infer_shape(data=(10,64))\n (None, None, None)\n >>> out.infer_shape_partial(data=(10,64))\n ([(10L, 64L), (128L, 64L), (128L,), (), (), ()], [(10L, 128L)], [])\n >>> # infers shape if you give information about fc2\n >>> out.infer_shape(data=(10,64), prev=(10,128))\n ([(10L, 64L), (128L, 64L), (128L,), (10L, 128L), (128L, 128L), (128L,)], [(10L, 128L)], [])\n\n Parameters\n ----------\n *args :\n Shape of arguments in a positional way.\n Unknown shape can be marked as None\n\n **kwargs :\n Keyword arguments of known shapes.\n\n Returns\n -------\n arg_shapes : list of tuple or None\n List of argument shapes.\n The order is same as the order of list_arguments().\n out_shapes : list of tuple or None\n List of output shapes.\n The order is same as the order of list_outputs().\n aux_shapes : list of tuple or None\n List of auxiliary state shapes.\n The order is same as the order of list_auxiliary_states().\n \"\"\"\n return self._infer_shape_impl(True, *args, **kwargs)\n\n def _infer_shape_impl(self, partial, *args, **kwargs):\n \"\"\"The actual implementation for calling shape inference API.\"\"\"\n if len(args) != 0 and len(kwargs) != 0:\n raise ValueError('Can only specify known argument \\\n shapes either by positional or kwargs way.')\n sdata = []\n indptr = [0]\n if len(args) != 0:\n keys = c_array(ctypes.c_char_p, [])\n for i, s in enumerate(args):\n if s is not None:\n if not isinstance(s, tuple):\n raise TypeError(\"Arguments need to be shapes (tuple), \"\n \"but argument %d is %s.\" % (i, type(s)))\n sdata.extend(s)\n indptr.append(len(sdata))\n else:\n str_keys = []\n for k, v in kwargs.items():\n if not isinstance(v, tuple):\n raise TypeError(\"Arguments need to be shapes (tuple), \"\n \"but '%s' is %s.\" % (k, type(v)))\n str_keys.append(k)\n sdata.extend(v)\n indptr.append(len(sdata))\n keys = c_str_array(str_keys)\n arg_shape_size = mx_uint()\n arg_shape_ndim = ctypes.POINTER(mx_uint)()\n arg_shape_data = ctypes.POINTER(ctypes.POINTER(mx_uint))()\n out_shape_size = mx_uint()\n out_shape_ndim = ctypes.POINTER(mx_uint)()\n out_shape_data = ctypes.POINTER(ctypes.POINTER(mx_uint))()\n aux_shape_size = mx_uint()\n aux_shape_ndim = ctypes.POINTER(mx_uint)()\n aux_shape_data = ctypes.POINTER(ctypes.POINTER(mx_uint))()\n complete = ctypes.c_int()\n if partial:\n infer_func = _LIB.MXSymbolInferShapePartial\n else:\n infer_func = _LIB.MXSymbolInferShape\n check_call(infer_func(\n self.handle,\n mx_uint(len(indptr) - 1),\n keys,\n c_array_buf(mx_uint, array('I', indptr)),\n c_array_buf(mx_uint, array('I', sdata)),\n ctypes.byref(arg_shape_size),\n ctypes.byref(arg_shape_ndim),\n ctypes.byref(arg_shape_data),\n ctypes.byref(out_shape_size),\n ctypes.byref(out_shape_ndim),\n ctypes.byref(out_shape_data),\n ctypes.byref(aux_shape_size),\n ctypes.byref(aux_shape_ndim),\n ctypes.byref(aux_shape_data),\n ctypes.byref(complete)))\n if complete.value != 0:\n arg_shapes = [\n tuple(arg_shape_data[i][:arg_shape_ndim[i]]) for i in range(arg_shape_size.value)]\n 
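# out_shapes and aux_shapes below are decoded from the backend's C arrays\n # in the same way as arg_shapes above\n 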
out_shapes = [\n tuple(out_shape_data[i][:out_shape_ndim[i]]) for i in range(out_shape_size.value)]\n aux_shapes = [\n tuple(aux_shape_data[i][:aux_shape_ndim[i]]) for i in range(aux_shape_size.value)]\n return (arg_shapes, out_shapes, aux_shapes)\n else:\n return (None, None, None)\n\n def debug_str(self):\n \"\"\"Gets a debug string of symbol.\n\n It contains Symbol output, variables and operators in the computation graph\n with their inputs, variables and attributes.\n\n Returns\n -------\n string\n Debug string of the symbol.\n\n Examples\n --------\n >>> a = mx.sym.Variable('a')\n >>> b = mx.sym.sin(a)\n >>> c = 2 * a + b\n >>> d = mx.sym.FullyConnected(data=c, num_hidden=10)\n >>> d.debug_str()\n >>> print d.debug_str()\n Symbol Outputs:\n\t output[0]=fullyconnected0(0)\n Variable:a\n --------------------\n Op:_mul_scalar, Name=_mulscalar0\n Inputs:\n \targ[0]=a(0) version=0\n Attrs:\n \tscalar=2\n --------------------\n Op:sin, Name=sin0\n Inputs:\n \targ[0]=a(0) version=0\n --------------------\n Op:elemwise_add, Name=_plus0\n Inputs:\n \targ[0]=_mulscalar0(0)\n \targ[1]=sin0(0)\n Variable:fullyconnected0_weight\n Variable:fullyconnected0_bias\n --------------------\n Op:FullyConnected, Name=fullyconnected0\n Inputs:\n \targ[0]=_plus0(0)\n \targ[1]=fullyconnected0_weight(0) version=0\n \targ[2]=fullyconnected0_bias(0) version=0\n Attrs:\n \tnum_hidden=10\n \"\"\"\n debug_str = ctypes.c_char_p()\n check_call(_LIB.MXSymbolPrint(\n self.handle, ctypes.byref(debug_str)))\n return py_str(debug_str.value)\n\n def save(self, fname):\n \"\"\"Saves symbol to a file.\n\n You can also use pickle to do the job if you only work on python.\n The advantage of `load`/`save` functions is that the file contents are language agnostic.\n This means the model saved by one language binding can be loaded by a different\n language binding of `MXNet`.\n You also get the benefit of being able to directly load/save from cloud storage(S3, HDFS).\n\n Parameters\n ----------\n fname : str\n The name of the file.\n\n - \"s3://my-bucket/path/my-s3-symbol\"\n - \"hdfs://my-bucket/path/my-hdfs-symbol\"\n - \"/path-to/my-local-symbol\"\n\n See Also\n --------\n symbol.load : Used to load symbol from file.\n \"\"\"\n if not isinstance(fname, string_types):\n raise TypeError('fname need to be string')\n check_call(_LIB.MXSymbolSaveToFile(self.handle, c_str(fname)))\n\n def tojson(self):\n \"\"\"Saves symbol to a JSON string.\n\n See Also\n --------\n symbol.load_json : Used to load symbol from JSON string.\n \"\"\"\n json_str = ctypes.c_char_p()\n check_call(_LIB.MXSymbolSaveToJSON(self.handle, ctypes.byref(json_str)))\n return py_str(json_str.value)\n\n @staticmethod\n def _get_ndarray_inputs(arg_key, args, arg_names, allow_missing):\n \"\"\"Helper function to get NDArray lists handles from various inputs.\n\n Parameters\n ----------\n arg_key : str\n The name of argument, used for error message.\n\n args : list of NDArray or dict of str to NDArray\n Input arguments to the symbols.\n If type is list of NDArray, the position is in the same order of arg_names.\n If type is dict of str to NDArray, then it maps the name of arguments\n to the corresponding NDArray,\n\n args_names : list of string\n List of argument names.\n\n allow_missing : boolean\n Whether missing argument is allowed.\n When allowed, the missing handle will be set to None(null)\n\n Returns\n -------\n handles : list of NDArrayHandle\n The positional list of NDArrayHandles generated from input.\n \"\"\"\n # setup args\n arg_handles = []\n arg_arrays = 
[]\n if isinstance(args, list):\n if len(args) != len(arg_names):\n raise ValueError('Length of %s does not match the number of arguments' % arg_key)\n for narr in args:\n if narr is None and allow_missing:\n arg_handles.append(None)\n elif not isinstance(narr, NDArray):\n raise TypeError('Only accept list of NDArrays or dict of str to NDArray')\n else:\n arg_handles.append(narr.handle)\n arg_arrays = args\n elif isinstance(args, dict):\n for name in arg_names:\n if name in args:\n narr = args[name]\n if not isinstance(narr, NDArray):\n raise TypeError('Only accept list of NDArrays or dict of str to NDArray')\n arg_handles.append(narr.handle)\n arg_arrays.append(narr)\n else:\n if allow_missing:\n arg_handles.append(None)\n arg_arrays.append(None)\n else:\n raise ValueError('key `%s` is missing in `%s`' % (name, arg_key))\n else:\n raise TypeError('Only accept list of NDArrays or dict of str to NDArray')\n return c_array(NDArrayHandle, arg_handles), arg_arrays\n\n def simple_bind(self, ctx, grad_req='write', type_dict=None, stype_dict=None,\n group2ctx=None, shared_arg_names=None, shared_exec=None,\n shared_buffer=None, **kwargs):\n \"\"\"Binds the current symbol to an executor, allocating all the arguments needed.\n Allows specifying data types.\n\n This function simplifies the binding procedure. You need to specify only input data shapes.\n Before binding the executor, the function allocates arguments and auxiliary states\n that were not explicitly specified.\n\n Example\n -------\n >>> x = mx.sym.Variable('x')\n >>> y = mx.sym.FullyConnected(x, num_hidden=4)\n >>> exe = y.simple_bind(mx.cpu(), x=(5,4), grad_req='null')\n >>> exe.forward()\n [<NDArray 5x4 @cpu(0)>]\n >>> exe.outputs[0].asnumpy()\n array([[ 0., 0., 0., 0.],\n [ 0., 0., 0., 0.],\n [ 0., 0., 0., 0.],\n [ 0., 0., 0., 0.],\n [ 0., 0., 0., 0.]], dtype=float32)\n >>> exe.arg_arrays\n [<NDArray 5x4 @cpu(0)>, <NDArray 4x4 @cpu(0)>, <NDArray 4 @cpu(0)>]\n >>> exe.grad_arrays\n [<NDArray 5x4 @cpu(0)>, <NDArray 4x4 @cpu(0)>, <NDArray 4 @cpu(0)>]\n\n Parameters\n ----------\n ctx : Context\n The device context on which the generated executor runs.\n\n grad_req : {'write', 'add', 'null'}, or list of str or dict of str to str, optional\n To specify how we should update the gradient to the `args_grad`.\n\n - 'write' means the gradient is written to the specified `args_grad` NDArray every time.\n - 'add' means the gradient is added to the specified NDArray every time.\n - 'null' means no action is taken; the gradient may not be calculated.\n\n type_dict : Dict of str->numpy.dtype\n Input type dictionary, name->dtype\n\n stype_dict : Dict of str->str\n Input storage type dictionary, name->storage_type\n\n group2ctx : Dict of string to mx.Context\n The dict mapping the `ctx_group` attribute to the context assignment.\n\n shared_arg_names : List of string\n The argument names whose `NDArray` of shared_exec can be reused for initializing\n the current executor.\n\n shared_exec : Executor\n The executor whose arg_arrays, grad_arrays, and aux_arrays can be\n reused for initializing the current executor.\n\n shared_buffer : Dict of string to `NDArray`\n The dict mapping argument names to the `NDArray` that can be reused for initializing\n the current executor. This buffer will be checked for reuse if one argument name\n of the current executor is not found in `shared_arg_names`. 
The `NDArray`s are\n expected have default storage type.\n\n kwargs : Dict of str->shape\n Input shape dictionary, name->shape\n\n Returns\n -------\n executor : mxnet.Executor\n The generated executor\n \"\"\"\n # data types\n num_provided_arg_types = 0\n provided_arg_type_names = ctypes.POINTER(ctypes.c_char_p)() # provided type argument names\n provided_arg_type_data = ctypes.POINTER(mx_uint)() # provided types\n if type_dict is not None:\n provided_arg_type_names = []\n provided_arg_type_data = []\n for k, v in type_dict.items():\n v = _numpy.dtype(v).type\n if v in _DTYPE_NP_TO_MX:\n provided_arg_type_names.append(k)\n provided_arg_type_data.append(_DTYPE_NP_TO_MX[v])\n num_provided_arg_types = mx_uint(len(provided_arg_type_names))\n provided_arg_type_names = c_str_array(provided_arg_type_names)\n provided_arg_type_data = c_array_buf(ctypes.c_int, array('i', provided_arg_type_data))\n\n # storage types\n num_provided_arg_stypes = 0\n # provided storage type argument names\n provided_arg_stype_names = ctypes.POINTER(ctypes.c_char_p)()\n provided_arg_stype_data = ctypes.POINTER(mx_uint)() # provided storage types\n if stype_dict is not None:\n provided_arg_stype_names = []\n provided_arg_stype_data = []\n for k, v in stype_dict.items():\n if v in _STORAGE_TYPE_STR_TO_ID:\n provided_arg_stype_names.append(k)\n provided_arg_stype_data.append(_STORAGE_TYPE_STR_TO_ID[v])\n num_provided_arg_stypes = mx_uint(len(provided_arg_stype_names))\n provided_arg_stype_names = c_str_array(provided_arg_stype_names)\n provided_arg_stype_data = c_array_buf(ctypes.c_int, array('i', provided_arg_stype_data))\n\n provided_arg_shape_data = [] # shape data\n # argument shape index in sdata,\n # e.g. [sdata[indptr[0]], sdata[indptr[1]]) is the shape of the first arg\n provided_arg_shape_idx = [0]\n provided_arg_shape_names = [] # provided argument names\n for k, v in kwargs.items():\n # if k not in listed_arguments and k not in listed_aux_states:\n # raise ValueError('arg name %s is not valid', k)\n if isinstance(v, tuple):\n provided_arg_shape_names.append(k)\n provided_arg_shape_data.extend(v)\n provided_arg_shape_idx.append(len(provided_arg_shape_data))\n\n provided_req_type_list_len = 0\n provided_grad_req_types = ctypes.POINTER(ctypes.c_char_p)()\n provided_grad_req_names = ctypes.POINTER(ctypes.c_char_p)()\n if grad_req is not None:\n if isinstance(grad_req, string_types):\n # use provided_req_type_list_len = 0 to indicate this situation\n provided_req_type_list_len = 0\n provided_grad_req_types = [grad_req]\n elif isinstance(grad_req, list):\n if len(grad_req) == 0:\n raise RuntimeError('grad_req in simple_bind cannot be an empty list')\n provided_grad_req_types = grad_req\n provided_req_type_list_len = len(provided_grad_req_types)\n elif isinstance(grad_req, dict):\n if len(grad_req) == 0:\n raise RuntimeError('grad_req in simple_bind cannot be an empty dict')\n provided_grad_req_names = []\n provided_grad_req_types = []\n for k, v in grad_req.items():\n provided_grad_req_names.append(k)\n provided_grad_req_types.append(v)\n provided_grad_req_names = c_str_array(provided_grad_req_names)\n provided_req_type_list_len = len(provided_grad_req_types)\n provided_grad_req_types = c_str_array(provided_grad_req_types)\n\n num_ctx_map_keys = mx_uint(0)\n ctx_map_keys = ctypes.POINTER(ctypes.c_char_p)()\n ctx_map_dev_types = ctypes.POINTER(ctypes.c_int)()\n ctx_map_dev_ids = ctypes.POINTER(ctypes.c_int)()\n if group2ctx is not None:\n ctx_map_keys = []\n ctx_map_dev_types = []\n ctx_map_dev_ids = []\n for key, val 
in group2ctx.items():\n ctx_map_keys.append(key)\n ctx_map_dev_types.append(val.device_typeid)\n ctx_map_dev_ids.append(val.device_id)\n num_ctx_map_keys = mx_uint(len(ctx_map_keys))\n ctx_map_keys = c_str_array(ctx_map_keys)\n ctx_map_dev_types = c_array(ctypes.c_int, array('i', ctx_map_dev_types))\n ctx_map_dev_ids = c_array(ctypes.c_int, array('i', ctx_map_dev_ids))\n\n # prepare param names\n shared_arg_name_list = []\n if shared_arg_names is not None:\n if not isinstance(shared_arg_names, list):\n raise ValueError('shared_arg_names in simple_bind must be a list or None')\n shared_arg_name_list = shared_arg_names\n\n # prepare shared_buffer\n if shared_buffer is None:\n shared_buffer_len = ctypes.c_int(-1)\n shared_buffer_names = ctypes.POINTER(ctypes.c_char_p)()\n shared_buffer_handles = ctypes.POINTER(NDArrayHandle)()\n else:\n if not isinstance(shared_buffer, dict):\n raise ValueError('shared_buffer in simple_bind must be dict or None')\n buffer_names = shared_buffer.keys()\n buffer_arrays = shared_buffer.values()\n for v in buffer_arrays:\n assert(v.stype == 'default'), \\\n \"shared_buffer is expected to only contain NDArrays with default storage\"\n shared_buffer_names = c_str_array(buffer_names)\n shared_buffer_len = ctypes.c_int(len(buffer_arrays))\n shared_buffer_handles = c_handle_array(buffer_arrays)\n updated_shared_buffer_names = ctypes.POINTER(ctypes.c_char_p)()\n updated_shared_buffer_handles = ctypes.POINTER(NDArrayHandle)()\n\n # prepare shared_exec_handle\n shared_exec_handle = shared_exec.handle if shared_exec is not None else ExecutorHandle()\n\n # prepare current executor handle\n exe_handle = ExecutorHandle()\n\n # prepare current executor's in_args, arg_grads, and aux_states\n num_in_args = ctypes.c_uint()\n in_arg_handles = ctypes.POINTER(NDArrayHandle)()\n arg_grad_handles = ctypes.POINTER(NDArrayHandle)()\n num_aux_states = ctypes.c_uint()\n aux_state_handles = ctypes.POINTER(NDArrayHandle)()\n\n try:\n check_call(_LIB.MXExecutorSimpleBind(self.handle,\n ctypes.c_int(ctx.device_typeid),\n ctypes.c_int(ctx.device_id),\n num_ctx_map_keys,\n ctx_map_keys,\n ctx_map_dev_types,\n ctx_map_dev_ids,\n mx_uint(provided_req_type_list_len),\n provided_grad_req_names,\n provided_grad_req_types,\n mx_uint(len(provided_arg_shape_names)),\n c_str_array(provided_arg_shape_names),\n c_array_buf(mx_uint,\n array('I', provided_arg_shape_data)),\n c_array_buf(mx_uint,\n array('I', provided_arg_shape_idx)),\n num_provided_arg_types,\n provided_arg_type_names,\n provided_arg_type_data,\n num_provided_arg_stypes,\n provided_arg_stype_names,\n provided_arg_stype_data,\n mx_uint(len(shared_arg_name_list)),\n c_str_array(shared_arg_name_list),\n ctypes.byref(shared_buffer_len),\n shared_buffer_names,\n shared_buffer_handles,\n ctypes.byref(updated_shared_buffer_names),\n ctypes.byref(updated_shared_buffer_handles),\n ctypes.byref(num_in_args),\n ctypes.byref(in_arg_handles),\n ctypes.byref(arg_grad_handles),\n ctypes.byref(num_aux_states),\n ctypes.byref(aux_state_handles),\n shared_exec_handle,\n ctypes.byref(exe_handle)))\n except MXNetError as e:\n error_msg = \"simple_bind error. 
Arguments:\\n\"\n for k, v in kwargs.items():\n error_msg += \"%s: %s\\n\" % (k, v)\n error_msg += \"%s\" % e\n raise RuntimeError(error_msg)\n\n # update shared_buffer\n if shared_buffer is not None:\n for i in range(shared_buffer_len.value):\n k = py_str(updated_shared_buffer_names[i])\n v = NDArray(NDArrayHandle(updated_shared_buffer_handles[i]))\n shared_buffer[k] = v\n\n # create in_args, arg_grads, and aux_states for the current executor\n arg_arrays = [_ndarray_cls(NDArrayHandle(in_arg_handles[i]))\n for i in range(num_in_args.value)]\n grad_arrays = [_ndarray_cls(NDArrayHandle(arg_grad_handles[i]))\n if arg_grad_handles[i] is not None\n else None for i in range(num_in_args.value)]\n aux_arrays = [_ndarray_cls(NDArrayHandle(aux_state_handles[i]))\n for i in range(num_aux_states.value)]\n\n executor = Executor(exe_handle, self, ctx, grad_req, group2ctx)\n executor.arg_arrays = arg_arrays\n executor.grad_arrays = grad_arrays\n executor.aux_arrays = aux_arrays\n return executor\n\n def bind(self, ctx, args, args_grad=None, grad_req='write',\n aux_states=None, group2ctx=None, shared_exec=None):\n \"\"\"Binds the current symbol to an executor and returns it.\n\n We first declare the computation and then bind it to the data to run.\n This function returns an executor which provides a `forward()` method for evaluation\n and an `outputs` property to get all the results.\n\n Example\n -------\n >>> a = mx.sym.Variable('a')\n >>> b = mx.sym.Variable('b')\n >>> c = a + b\n >>> c\n <Symbol _plus1>\n >>> ex = c.bind(ctx=mx.cpu(), args={'a' : mx.nd.ones([2,3]), 'b' : mx.nd.ones([2,3])})\n >>> ex.forward()\n [<NDArray 2x3 @cpu(0)>]\n >>> ex.outputs[0].asnumpy()\n array([[ 2., 2., 2.],\n [ 2., 2., 2.]], dtype=float32)\n\n Parameters\n ----------\n ctx : Context\n The device context on which the generated executor runs.\n\n args : list of NDArray or dict of str to NDArray\n Input arguments to the symbol.\n\n - If the input type is a list of `NDArray`, the order should be the same as the order\n of `list_arguments()`.\n - If the input type is a dict of str to `NDArray`, then it maps the name of arguments\n to the corresponding `NDArray`.\n - In either case, all the arguments must be provided.\n\n args_grad : list of NDArray or dict of str to `NDArray`, optional\n When specified, `args_grad` provides NDArrays to hold\n the gradient values computed in the backward pass.\n\n - If the input type is a list of `NDArray`, the order should be the same as the order\n of `list_arguments()`.\n - If the input type is a dict of str to `NDArray`, then it maps the name of arguments\n to the corresponding NDArray.\n - When the type is a dict of str to `NDArray`, one only needs to provide the\n entries for the required argument gradients.\n Only the specified argument gradients will be calculated.\n\n grad_req : {'write', 'add', 'null'}, or list of str or dict of str to str, optional\n To specify how we should update the gradient to the `args_grad`.\n\n - 'write' means the gradient is written to the specified `args_grad` `NDArray` every time.\n - 'add' means the gradient is added to the specified NDArray every time.\n - 'null' means no action is taken; the gradient may not be calculated.\n\n aux_states : list of `NDArray`, or dict of str to `NDArray`, optional\n Input auxiliary states to the symbol, only needed when the output of\n `list_auxiliary_states()` is not empty.\n\n - If the input type is a list of `NDArray`, the order should be the same as the order\n of `list_auxiliary_states()`.\n - If the input type is a dict of str to `NDArray`, then it maps the name of\n `auxiliary_states` to the 
corresponding `NDArray`,\n - In either case, all the auxiliary states need to be provided.\n\n group2ctx : Dict of string to mx.Context\n The dict mapping the `ctx_group` attribute to the context assignment.\n\n shared_exec : mx.executor.Executor\n Executor to share memory with. This is intended for runtime reshaping, variable length\n sequences, etc. The returned executor shares state with `shared_exec`, and should not be\n used in parallel with it.\n\n Returns\n -------\n executor : Executor\n The generated executor\n\n Notes\n -----\n Auxiliary states are the special states of symbols that do not correspond\n to an argument and do not have gradients, but are still useful\n for specific operations. Common examples of auxiliary states include\n the `moving_mean` and `moving_variance` states in `BatchNorm`.\n Most operators do not have auxiliary states and in those cases,\n this parameter can be safely ignored.\n\n One can give up gradients by using a dict in `args_grad` and only specifying\n the gradients they are interested in.\n \"\"\"\n # pylint: disable=too-many-branches\n if not isinstance(ctx, Context):\n raise TypeError(\"Context type error\")\n\n listed_arguments = self.list_arguments()\n args_handle, args = self._get_ndarray_inputs('args', args, listed_arguments, False)\n # setup args gradient\n if args_grad is None:\n args_grad_handle = c_array(NDArrayHandle, [None] * len(args))\n else:\n args_grad_handle, args_grad = self._get_ndarray_inputs(\n 'args_grad', args_grad, listed_arguments, True)\n\n if aux_states is None:\n aux_states = []\n aux_args_handle, aux_states = self._get_ndarray_inputs(\n 'aux_states', aux_states, self.list_auxiliary_states(), False)\n\n # setup requirements\n if isinstance(grad_req, string_types):\n if grad_req not in _GRAD_REQ_MAP:\n raise ValueError('grad_req must be in %s' % str(_GRAD_REQ_MAP))\n reqs_array = c_array_buf(mx_uint,\n array('I', [_GRAD_REQ_MAP[grad_req]] * len(listed_arguments)))\n elif isinstance(grad_req, list):\n reqs_array = c_array_buf(mx_uint,\n array('I', [_GRAD_REQ_MAP[item] for item in grad_req]))\n elif isinstance(grad_req, dict):\n req_array = []\n for name in listed_arguments:\n if name in grad_req:\n req_array.append(_GRAD_REQ_MAP[grad_req[name]])\n else:\n req_array.append(0)\n reqs_array = c_array_buf(mx_uint, array('I', req_array))\n\n ctx_map_keys = []\n ctx_map_dev_types = []\n ctx_map_dev_ids = []\n\n if group2ctx:\n for key, val in group2ctx.items():\n ctx_map_keys.append(key)\n ctx_map_dev_types.append(val.device_typeid)\n ctx_map_dev_ids.append(val.device_id)\n\n handle = ExecutorHandle()\n shared_handle = shared_exec.handle if shared_exec is not None else ExecutorHandle()\n check_call(_LIB.MXExecutorBindEX(self.handle,\n ctypes.c_int(ctx.device_typeid),\n ctypes.c_int(ctx.device_id),\n mx_uint(len(ctx_map_keys)),\n c_str_array(ctx_map_keys),\n c_array_buf(ctypes.c_int, array('i', ctx_map_dev_types)),\n c_array_buf(ctypes.c_int, array('i', ctx_map_dev_ids)),\n mx_uint(len(args)),\n args_handle,\n args_grad_handle,\n reqs_array,\n mx_uint(len(aux_states)),\n aux_args_handle,\n shared_handle,\n ctypes.byref(handle)))\n executor = Executor(handle, self, ctx, grad_req, group2ctx)\n executor.arg_arrays = args\n executor.grad_arrays = args_grad\n executor.aux_arrays = aux_states\n return executor\n\n def gradient(self, wrt):\n \"\"\"Gets the autodiff of the current symbol.\n\n This function can only be used if the current symbol is a loss function.\n\n .. 
note:: This function is currently not implemented.\n\n Parameters\n ----------\n wrt : Array of String\n keyword arguments of the symbol that the gradients are taken.\n\n Returns\n -------\n grad : Symbol\n A gradient Symbol with returns to be the corresponding gradients.\n \"\"\"\n handle = SymbolHandle()\n c_wrt = c_str_array(wrt)\n check_call(_LIB.MXSymbolGrad(self.handle,\n mx_uint(len(wrt)),\n c_wrt,\n ctypes.byref(handle)))\n return Symbol(handle)\n\n # pylint: enable= no-member\n\n def eval(self, ctx=None, **kwargs):\n \"\"\"Evaluates a symbol given arguments.\n\n The `eval` method combines a call to `bind` (which returns an executor)\n with a call to `forward` (executor method).\n For the common use case, where you might repeatedly evaluate with same arguments,\n eval is slow.\n In that case, you should call `bind` once and then repeatedly call forward.\n This function allows simpler syntax for less cumbersome introspection.\n\n Example\n -------\n >>> a = mx.sym.Variable('a')\n >>> b = mx.sym.Variable('b')\n >>> c = a + b\n >>> ex = c.eval(ctx = mx.cpu(), a = mx.nd.ones([2,3]), b = mx.nd.ones([2,3]))\n >>> ex\n [<NDArray 2x3 @cpu(0)>]\n >>> ex[0].asnumpy()\n array([[ 2., 2., 2.],\n [ 2., 2., 2.]], dtype=float32)\n\n Parameters\n ----------\n ctx : Context\n The device context the generated executor to run on.\n\n kwargs : Keyword arguments of type `NDArray`\n Input arguments to the symbol. All the arguments must be provided.\n\n Returns\n ----------\n result : a list of NDArrays corresponding to the values taken by each symbol when\n evaluated on given args. When called on a single symbol (not a group),\n the result will be a list with one element.\n \"\"\"\n if ctx is None:\n ctx = current_context()\n return self.bind(ctx, kwargs).forward()\n\n def reshape(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`reshape`.\n\n The arguments are the same as for :py:func:`reshape`, with\n this array as data.\n \"\"\"\n return op.reshape(self, *args, **kwargs)\n\n def reshape_like(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`reshape_like`.\n\n The arguments are the same as for :py:func:`reshape_like`, with\n this array as data.\n \"\"\"\n return op.reshape_like(self, *args, **kwargs)\n\n def astype(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`cast`.\n\n The arguments are the same as for :py:func:`cast`, with\n this array as data.\n \"\"\"\n return op.cast(self, *args, **kwargs)\n\n def zeros_like(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`zeros_like`.\n\n The arguments are the same as for :py:func:`zeros_like`, with\n this array as data.\n \"\"\"\n return op.zeros_like(self, *args, **kwargs)\n\n def ones_like(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`ones_like`.\n\n The arguments are the same as for :py:func:`ones_like`, with\n this array as data.\n \"\"\"\n return op.ones_like(self, *args, **kwargs)\n\n def broadcast_axes(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`broadcast_axes`.\n\n The arguments are the same as for :py:func:`broadcast_axes`, with\n this array as data.\n \"\"\"\n return op.broadcast_axes(self, *args, **kwargs)\n\n def repeat(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`repeat`.\n\n The arguments are the same as for :py:func:`repeat`, with\n this array as data.\n \"\"\"\n return op.repeat(self, *args, **kwargs)\n\n def pad(self, *args, **kwargs):\n \"\"\"Convenience fluent method for 
:py:func:`pad`.\n\n The arguments are the same as for :py:func:`pad`, with\n this array as data.\n \"\"\"\n return op.pad(self, *args, **kwargs)\n\n def swapaxes(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`swapaxes`.\n\n The arguments are the same as for :py:func:`swapaxes`, with\n this array as data.\n \"\"\"\n return op.swapaxes(self, *args, **kwargs)\n\n def split(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`split`.\n\n The arguments are the same as for :py:func:`split`, with\n this array as data.\n \"\"\"\n return op.split(self, *args, **kwargs)\n\n def slice(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`slice`.\n\n The arguments are the same as for :py:func:`slice`, with\n this array as data.\n \"\"\"\n return op.slice(self, *args, **kwargs)\n\n def slice_axis(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`slice_axis`.\n\n The arguments are the same as for :py:func:`slice_axis`, with\n this array as data.\n \"\"\"\n return op.slice_axis(self, *args, **kwargs)\n\n def slice_like(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`slice_like`.\n\n The arguments are the same as for :py:func:`slice_like`, with\n this array as data.\n \"\"\"\n return op.slice_like(self, *args, **kwargs)\n\n def take(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`take`.\n\n The arguments are the same as for :py:func:`take`, with\n this array as data.\n \"\"\"\n return op.take(self, *args, **kwargs)\n\n def one_hot(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`one_hot`.\n\n The arguments are the same as for :py:func:`one_hot`, with\n this array as data.\n \"\"\"\n return op.one_hot(self, *args, **kwargs)\n\n def pick(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`pick`.\n\n The arguments are the same as for :py:func:`pick`, with\n this array as data.\n \"\"\"\n return op.pick(self, *args, **kwargs)\n\n def sort(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`sort`.\n\n The arguments are the same as for :py:func:`sort`, with\n this array as data.\n \"\"\"\n return op.sort(self, *args, **kwargs)\n\n def topk(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`topk`.\n\n The arguments are the same as for :py:func:`topk`, with\n this array as data.\n \"\"\"\n return op.topk(self, *args, **kwargs)\n\n def argsort(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`argsort`.\n\n The arguments are the same as for :py:func:`argsort`, with\n this array as data.\n \"\"\"\n return op.argsort(self, *args, **kwargs)\n\n def argmax(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`argmax`.\n\n The arguments are the same as for :py:func:`argmax`, with\n this array as data.\n \"\"\"\n return op.argmax(self, *args, **kwargs)\n\n def argmax_channel(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`argmax_channel`.\n\n The arguments are the same as for :py:func:`argmax_channel`, with\n this array as data.\n \"\"\"\n return op.argmax_channel(self, *args, **kwargs)\n\n def argmin(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`argmin`.\n\n The arguments are the same as for :py:func:`argmin`, with\n this array as data.\n \"\"\"\n return op.argmin(self, *args, **kwargs)\n\n def clip(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`clip`.\n\n The arguments are the same as for 
:py:func:`clip`, with\n this array as data.\n \"\"\"\n return op.clip(self, *args, **kwargs)\n\n def abs(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`abs`.\n\n The arguments are the same as for :py:func:`abs`, with\n this array as data.\n \"\"\"\n return op.abs(self, *args, **kwargs)\n\n def sign(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`sign`.\n\n The arguments are the same as for :py:func:`sign`, with\n this array as data.\n \"\"\"\n return op.sign(self, *args, **kwargs)\n\n def flatten(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`flatten`.\n\n The arguments are the same as for :py:func:`flatten`, with\n this array as data.\n \"\"\"\n return op.flatten(self, *args, **kwargs)\n\n def shape_array(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`shape_array`.\n\n The arguments are the same as for :py:func:`shape_op`, with\n this array as data.\n \"\"\"\n return op.shape_array(self, *args, **kwargs)\n\n def size_array(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`size_array`.\n\n The arguments are the same as for :py:func:`size_array`, with\n this array as data.\n \"\"\"\n return op.size_array(self, *args, **kwargs)\n\n def expand_dims(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`expand_dims`.\n\n The arguments are the same as for :py:func:`expand_dims`, with\n this array as data.\n \"\"\"\n return op.expand_dims(self, *args, **kwargs)\n\n def broadcast_to(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`broadcast_to`.\n\n The arguments are the same as for :py:func:`broadcast_to`, with\n this array as data.\n \"\"\"\n return op.broadcast_to(self, *args, **kwargs)\n\n def broadcast_like(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`broadcast_like`.\n\n The arguments are the same as for :py:func:`broadcast_like`, with\n this array as data.\n \"\"\"\n return op.broadcast_like(self, *args, **kwargs)\n\n def tile(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`tile`.\n\n The arguments are the same as for :py:func:`tile`, with\n this array as data.\n \"\"\"\n return op.tile(self, *args, **kwargs)\n\n def transpose(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`transpose`.\n\n The arguments are the same as for :py:func:`transpose`, with\n this array as data.\n \"\"\"\n return op.transpose(self, *args, **kwargs)\n\n def flip(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`flip`.\n\n The arguments are the same as for :py:func:`flip`, with\n this array as data.\n \"\"\"\n return op.flip(self, *args, **kwargs)\n\n def depth_to_space(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`depth_to_space`.\n\n The arguments are the same as for :py:func:`depth_to_space`, with\n this array as data.\n \"\"\"\n return op.depth_to_space(self, *args, **kwargs)\n\n def space_to_depth(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`space_to_depth`.\n\n The arguments are the same as for :py:func:`space_to_depth`, with\n this array as data.\n \"\"\"\n return op.space_to_depth(self, *args, **kwargs)\n\n def diag(self, k=0, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`diag`.\n\n The arguments are the same as for :py:func:`diag`, with\n this array as data.\n \"\"\"\n return op.diag(self, k, **kwargs)\n\n def sum(self, *args, **kwargs):\n \"\"\"Convenience fluent method for 
:py:func:`sum`.\n\n The arguments are the same as for :py:func:`sum`, with\n this array as data.\n \"\"\"\n return op.sum(self, *args, **kwargs)\n\n def nansum(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`nansum`.\n\n The arguments are the same as for :py:func:`nansum`, with\n this array as data.\n \"\"\"\n return op.nansum(self, *args, **kwargs)\n\n def prod(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`prod`.\n\n The arguments are the same as for :py:func:`prod`, with\n this array as data.\n \"\"\"\n return op.prod(self, *args, **kwargs)\n\n def nanprod(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`nanprod`.\n\n The arguments are the same as for :py:func:`nanprod`, with\n this array as data.\n \"\"\"\n return op.nanprod(self, *args, **kwargs)\n\n def mean(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`mean`.\n\n The arguments are the same as for :py:func:`mean`, with\n this array as data.\n \"\"\"\n return op.mean(self, *args, **kwargs)\n\n def max(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`max`.\n\n The arguments are the same as for :py:func:`max`, with\n this array as data.\n \"\"\"\n return op.max(self, *args, **kwargs)\n\n def min(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`min`.\n\n The arguments are the same as for :py:func:`min`, with\n this array as data.\n \"\"\"\n return op.min(self, *args, **kwargs)\n\n def norm(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`norm`.\n\n The arguments are the same as for :py:func:`norm`, with\n this array as data.\n \"\"\"\n return op.norm(self, *args, **kwargs)\n\n def round(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`round`.\n\n The arguments are the same as for :py:func:`round`, with\n this array as data.\n \"\"\"\n return op.round(self, *args, **kwargs)\n\n def rint(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`rint`.\n\n The arguments are the same as for :py:func:`rint`, with\n this array as data.\n \"\"\"\n return op.rint(self, *args, **kwargs)\n\n def fix(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`fix`.\n\n The arguments are the same as for :py:func:`fix`, with\n this array as data.\n \"\"\"\n return op.fix(self, *args, **kwargs)\n\n def floor(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`floor`.\n\n The arguments are the same as for :py:func:`floor`, with\n this array as data.\n \"\"\"\n return op.floor(self, *args, **kwargs)\n\n def ceil(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`ceil`.\n\n The arguments are the same as for :py:func:`ceil`, with\n this array as data.\n \"\"\"\n return op.ceil(self, *args, **kwargs)\n\n def trunc(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`trunc`.\n\n The arguments are the same as for :py:func:`trunc`, with\n this array as data.\n \"\"\"\n return op.trunc(self, *args, **kwargs)\n\n def sin(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`sin`.\n\n The arguments are the same as for :py:func:`sin`, with\n this array as data.\n \"\"\"\n return op.sin(self, *args, **kwargs)\n\n def cos(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`cos`.\n\n The arguments are the same as for :py:func:`cos`, with\n this array as data.\n \"\"\"\n return op.cos(self, *args, **kwargs)\n\n def tan(self, *args, **kwargs):\n 
\"\"\"Convenience fluent method for :py:func:`tan`.\n\n The arguments are the same as for :py:func:`tan`, with\n this array as data.\n \"\"\"\n return op.tan(self, *args, **kwargs)\n\n def arcsin(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`arcsin`.\n\n The arguments are the same as for :py:func:`arcsin`, with\n this array as data.\n \"\"\"\n return op.arcsin(self, *args, **kwargs)\n\n def arccos(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`arccos`.\n\n The arguments are the same as for :py:func:`arccos`, with\n this array as data.\n \"\"\"\n return op.arccos(self, *args, **kwargs)\n\n def arctan(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`arctan`.\n\n The arguments are the same as for :py:func:`arctan`, with\n this array as data.\n \"\"\"\n return op.arctan(self, *args, **kwargs)\n\n def degrees(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`degrees`.\n\n The arguments are the same as for :py:func:`degrees`, with\n this array as data.\n \"\"\"\n return op.degrees(self, *args, **kwargs)\n\n def radians(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`radians`.\n\n The arguments are the same as for :py:func:`radians`, with\n this array as data.\n \"\"\"\n return op.radians(self, *args, **kwargs)\n\n def sinh(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`sinh`.\n\n The arguments are the same as for :py:func:`sinh`, with\n this array as data.\n \"\"\"\n return op.sinh(self, *args, **kwargs)\n\n def cosh(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`cosh`.\n\n The arguments are the same as for :py:func:`cosh`, with\n this array as data.\n \"\"\"\n return op.cosh(self, *args, **kwargs)\n\n def tanh(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`tanh`.\n\n The arguments are the same as for :py:func:`tanh`, with\n this array as data.\n \"\"\"\n return op.tanh(self, *args, **kwargs)\n\n def arcsinh(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`arcsinh`.\n\n The arguments are the same as for :py:func:`arcsinh`, with\n this array as data.\n \"\"\"\n return op.arcsinh(self, *args, **kwargs)\n\n def arccosh(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`arccosh`.\n\n The arguments are the same as for :py:func:`arccosh`, with\n this array as data.\n \"\"\"\n return op.arccosh(self, *args, **kwargs)\n\n def arctanh(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`arctanh`.\n\n The arguments are the same as for :py:func:`arctanh`, with\n this array as data.\n \"\"\"\n return op.arctanh(self, *args, **kwargs)\n\n def exp(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`exp`.\n\n The arguments are the same as for :py:func:`exp`, with\n this array as data.\n \"\"\"\n return op.exp(self, *args, **kwargs)\n\n def expm1(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`expm1`.\n\n The arguments are the same as for :py:func:`expm1`, with\n this array as data.\n \"\"\"\n return op.expm1(self, *args, **kwargs)\n\n def log(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`log`.\n\n The arguments are the same as for :py:func:`log`, with\n this array as data.\n \"\"\"\n return op.log(self, *args, **kwargs)\n\n def log10(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`log10`.\n\n The arguments are the same as for :py:func:`log10`, with\n this array as 
data.\n \"\"\"\n return op.log10(self, *args, **kwargs)\n\n def log2(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`log2`.\n\n The arguments are the same as for :py:func:`log2`, with\n this array as data.\n \"\"\"\n return op.log2(self, *args, **kwargs)\n\n def log1p(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`log1p`.\n\n The arguments are the same as for :py:func:`log1p`, with\n this array as data.\n \"\"\"\n return op.log1p(self, *args, **kwargs)\n\n def sqrt(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`sqrt`.\n\n The arguments are the same as for :py:func:`sqrt`, with\n this array as data.\n \"\"\"\n return op.sqrt(self, *args, **kwargs)\n\n def rsqrt(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`rsqrt`.\n\n The arguments are the same as for :py:func:`rsqrt`, with\n this array as data.\n \"\"\"\n return op.rsqrt(self, *args, **kwargs)\n\n def cbrt(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`cbrt`.\n\n The arguments are the same as for :py:func:`cbrt`, with\n this array as data.\n \"\"\"\n return op.cbrt(self, *args, **kwargs)\n\n def rcbrt(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`rcbrt`.\n\n The arguments are the same as for :py:func:`rcbrt`, with\n this array as data.\n \"\"\"\n return op.rcbrt(self, *args, **kwargs)\n\n def square(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`square`.\n\n The arguments are the same as for :py:func:`square`, with\n this array as data.\n \"\"\"\n return op.square(self, *args, **kwargs)\n\n def reciprocal(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`reciprocal`.\n\n The arguments are the same as for :py:func:`reciprocal`, with\n this array as data.\n \"\"\"\n return op.reciprocal(self, *args, **kwargs)\n\n def relu(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`relu`.\n\n The arguments are the same as for :py:func:`relu`, with\n this array as data.\n \"\"\"\n return op.relu(self, *args, **kwargs)\n\n def sigmoid(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`sigmoid`.\n\n The arguments are the same as for :py:func:`sigmoid`, with\n this array as data.\n \"\"\"\n return op.sigmoid(self, *args, **kwargs)\n\n def softmax(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`softmax`.\n\n The arguments are the same as for :py:func:`softmax`, with\n this array as data.\n \"\"\"\n return op.softmax(self, *args, **kwargs)\n\n def log_softmax(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`log_softmax`.\n\n The arguments are the same as for :py:func:`log_softmax`, with\n this array as data.\n \"\"\"\n return op.log_softmax(self, *args, **kwargs)\n\n def squeeze(self, *args, **kwargs):\n \"\"\"Convenience fluent method for :py:func:`squeeze`.\n\n The arguments are the same as for :py:func:`squeeze`, with\n this array as data.\n \"\"\"\n return op.squeeze(self, *args, **kwargs)\n\n def wait_to_read(self):\n raise NotImplementedForSymbol(self.wait_to_read, None)\n\n def asnumpy(self):\n raise NotImplementedForSymbol(self.asnumpy, None)\n\n def asscalar(self):\n raise NotImplementedForSymbol(self.asscalar, None)\n\n def copy(self):\n raise NotImplementedForSymbol(self.copy, None)\n\n def as_in_context(self):\n raise NotImplementedForSymbol(self.as_in_context, None)\n\n def detach(self):\n raise NotImplementedForSymbol(self.detach, None)\n\n def backward(self):\n 
raise NotImplementedForSymbol(self.backward, None)\n\ndef var(name, attr=None, shape=None, lr_mult=None, wd_mult=None, dtype=None,\n init=None, stype=None, **kwargs):\n \"\"\"Creates a symbolic variable with specified name.\n\n Example\n -------\n >>> data = mx.sym.Variable('data', attr={'a': 'b'})\n >>> data\n <Symbol data>\n >>> csr_data = mx.sym.Variable('csr_data', stype='csr')\n >>> csr_data\n <Symbol csr_data>\n >>> row_sparse_weight = mx.sym.Variable('weight', stype='row_sparse')\n >>> row_sparse_weight\n <Symbol weight>\n\n Parameters\n ----------\n name : str\n Variable name.\n attr : Dict of strings\n Additional attributes to set on the variable. Format {string : string}.\n shape : tuple\n The shape of a variable. If specified, this will be used during the shape inference.\n If one has specified a different shape for this variable using\n a keyword argument when calling shape inference, this shape information will be ignored.\n lr_mult : float\n The learning rate multiplier for input variable.\n wd_mult : float\n Weight decay multiplier for input variable.\n dtype : str or numpy.dtype\n The dtype for input variable. If not specified, this value will be inferred.\n init : initializer (mxnet.init.*)\n Initializer for this variable to (optionally) override the default initializer.\n stype : str\n The storage type of the variable, such as 'row_sparse', 'csr', 'default', etc\n kwargs : Additional attribute variables\n Additional attributes must start and end with double underscores.\n\n Returns\n -------\n variable : Symbol\n A symbol corresponding to an input to the computation graph.\n \"\"\"\n if not isinstance(name, string_types):\n raise TypeError('Expect a string for variable `name`')\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolCreateVariable(c_str(name), ctypes.byref(handle)))\n ret = Symbol(handle)\n if not hasattr(AttrScope._current, \"value\"):\n AttrScope._current.value = AttrScope()\n attr = AttrScope._current.value.get(attr)\n attr = {} if attr is None else attr\n if shape is not None:\n attr['__shape__'] = str(shape)\n if lr_mult is not None:\n attr['__lr_mult__'] = str(lr_mult)\n if wd_mult is not None:\n attr['__wd_mult__'] = str(wd_mult)\n if dtype is not None:\n attr['__dtype__'] = str(_DTYPE_NP_TO_MX[_numpy.dtype(dtype).type])\n if init is not None:\n if not isinstance(init, string_types):\n init = init.dumps()\n attr['__init__'] = init\n if stype is not None:\n attr['__storage_type__'] = str(_STORAGE_TYPE_STR_TO_ID[stype])\n for k, v in kwargs.items():\n if k.startswith('__') and k.endswith('__'):\n attr[k] = str(v)\n else:\n raise ValueError('Attribute name=%s is not supported.'\n ' Additional attributes must start and end with double underscores,'\n ' e.g, __yourattr__' % k)\n ret._set_attr(**attr)\n return ret\n\n\n# for back compatibility\nVariable = var\n\n\ndef Group(symbols):\n \"\"\"Creates a symbol that contains a collection of other symbols, grouped together.\n\n Example\n -------\n >>> a = mx.sym.Variable('a')\n >>> b = mx.sym.Variable('b')\n >>> mx.sym.Group([a,b])\n <Symbol Grouped>\n\n Parameters\n ----------\n symbols : list\n List of symbols to be grouped.\n\n Returns\n -------\n sym : Symbol\n A group symbol.\n \"\"\"\n if not symbols or any(not isinstance(sym, Symbol) for sym in symbols):\n raise TypeError('Expected a list of symbols as input')\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolCreateGroup(\n mx_uint(len(symbols)),\n c_handle_array(symbols), ctypes.byref(handle)))\n return Symbol(handle)\n\n\ndef load(fname):\n 
\"\"\"Loads symbol from a JSON file.\n\n You can also use pickle to do the job if you only work on python.\n The advantage of load/save is the file is language agnostic.\n This means the file saved using save can be loaded by other language binding of mxnet.\n You also get the benefit being able to directly load/save from cloud storage(S3, HDFS).\n\n Parameters\n ----------\n fname : str\n The name of the file, examples:\n\n - `s3://my-bucket/path/my-s3-symbol`\n - `hdfs://my-bucket/path/my-hdfs-symbol`\n - `/path-to/my-local-symbol`\n\n Returns\n -------\n sym : Symbol\n The loaded symbol.\n\n See Also\n --------\n Symbol.save : Used to save symbol into file.\n \"\"\"\n if not isinstance(fname, string_types):\n raise TypeError('fname need to be string')\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolCreateFromFile(c_str(fname), ctypes.byref(handle)))\n return Symbol(handle)\n\n\ndef load_json(json_str):\n \"\"\"Loads symbol from json string.\n\n Parameters\n ----------\n json_str : str\n A JSON string.\n\n Returns\n -------\n sym : Symbol\n The loaded symbol.\n\n See Also\n --------\n Symbol.tojson : Used to save symbol into json string.\n \"\"\"\n if not isinstance(json_str, string_types):\n raise TypeError('fname required to be string')\n handle = SymbolHandle()\n check_call(_LIB.MXSymbolCreateFromJSON(c_str(json_str), ctypes.byref(handle)))\n return Symbol(handle)\n\n\n# pylint: disable=no-member\n# pylint: disable=redefined-builtin\ndef pow(base, exp):\n \"\"\"Returns element-wise result of base element raised to powers from exp element.\n\n Both inputs can be Symbol or scalar number.\n Broadcasting is not supported. Use `broadcast_pow` instead.\n\n Parameters\n ---------\n base : Symbol or scalar\n The base symbol\n exp : Symbol or scalar\n The exponent symbol\n\n Returns\n -------\n Symbol or scalar\n The bases in x raised to the exponents in y.\n\n Examples\n --------\n >>> mx.sym.pow(2, 3)\n 8\n >>> x = mx.sym.Variable('x')\n >>> y = mx.sym.Variable('y')\n >>> z = mx.sym.pow(x, 2)\n >>> z.eval(x=mx.nd.array([1,2]))[0].asnumpy()\n array([ 1., 4.], dtype=float32)\n >>> z = mx.sym.pow(3, y)\n >>> z.eval(y=mx.nd.array([2,3]))[0].asnumpy()\n array([ 9., 27.], dtype=float32)\n >>> z = mx.sym.pow(x, y)\n >>> z.eval(x=mx.nd.array([3,4]), y=mx.nd.array([2,3]))[0].asnumpy()\n array([ 9., 64.], dtype=float32)\n \"\"\"\n if isinstance(base, Symbol) and isinstance(exp, Symbol):\n return _internal._Power(base, exp)\n if isinstance(base, Symbol) and isinstance(exp, Number):\n return _internal._PowerScalar(base, scalar=exp)\n if isinstance(base, Number) and isinstance(exp, Symbol):\n return _internal._RPowerScalar(exp, scalar=base)\n if isinstance(base, Number) and isinstance(exp, Number):\n return base**exp\n else:\n raise TypeError('types (%s, %s) not supported' % (str(type(base)), str(type(exp))))\n\n\n# pylint: disable=no-member\n# pylint: disable=redefined-builtin\ndef maximum(left, right):\n \"\"\"Returns element-wise maximum of the input elements.\n\n Both inputs can be Symbol or scalar number. 
Broadcasting is not supported.\n\n Parameters\n ---------\n left : Symbol or scalar\n First symbol to be compared.\n right : Symbol or scalar\n Second symbol to be compared.\n\n Returns\n -------\n Symbol or scalar\n The element-wise maximum of the input symbols.\n\n Examples\n --------\n >>> mx.sym.maximum(2, 3.5)\n 3.5\n >>> x = mx.sym.Variable('x')\n >>> y = mx.sym.Variable('y')\n >>> z = mx.sym.maximum(x, 4)\n >>> z.eval(x=mx.nd.array([3,5,2,10]))[0].asnumpy()\n array([ 4., 5., 4., 10.], dtype=float32)\n >>> z = mx.sym.maximum(x, y)\n >>> z.eval(x=mx.nd.array([3,4]), y=mx.nd.array([10,2]))[0].asnumpy()\n array([ 10., 4.], dtype=float32)\n \"\"\"\n if isinstance(left, Symbol) and isinstance(right, Symbol):\n return _internal._Maximum(left, right)\n if isinstance(left, Symbol) and isinstance(right, Number):\n return _internal._MaximumScalar(left, scalar=right)\n if isinstance(left, Number) and isinstance(right, Symbol):\n return _internal._MaximumScalar(right, scalar=left)\n if isinstance(left, Number) and isinstance(right, Number):\n return left if left > right else right\n else:\n raise TypeError('types (%s, %s) not supported' % (str(type(left)), str(type(right))))\n\n\n# pylint: disable=no-member\n# pylint: disable=redefined-builtin\ndef minimum(left, right):\n \"\"\"Returns element-wise minimum of the input elements.\n\n Both inputs can be Symbol or scalar number. Broadcasting is not supported.\n\n Parameters\n ---------\n left : Symbol or scalar\n First symbol to be compared.\n right : Symbol or scalar\n Second symbol to be compared.\n\n Returns\n -------\n Symbol or scalar\n The element-wise minimum of the input symbols.\n\n Examples\n --------\n >>> mx.sym.minimum(2, 3.5)\n 2\n >>> x = mx.sym.Variable('x')\n >>> y = mx.sym.Variable('y')\n >>> z = mx.sym.minimum(x, 4)\n >>> z.eval(x=mx.nd.array([3,5,2,10]))[0].asnumpy()\n array([ 3., 4., 2., 4.], dtype=float32)\n >>> z = mx.sym.minimum(x, y)\n >>> z.eval(x=mx.nd.array([3,4]), y=mx.nd.array([10,2]))[0].asnumpy()\n array([ 3., 2.], dtype=float32)\n \"\"\"\n if isinstance(left, Symbol) and isinstance(right, Symbol):\n return _internal._Minimum(left, right)\n if isinstance(left, Symbol) and isinstance(right, Number):\n return _internal._MinimumScalar(left, scalar=right)\n if isinstance(left, Number) and isinstance(right, Symbol):\n return _internal._MinimumScalar(right, scalar=left)\n if isinstance(left, Number) and isinstance(right, Number):\n return left if left < right else right\n else:\n raise TypeError('types (%s, %s) not supported' % (str(type(left)), str(type(right))))\n\n\n# pylint: disable=no-member\n# pylint: disable=redefined-builtin\ndef hypot(left, right):\n \"\"\"Given the \"legs\" of a right triangle, returns its hypotenuse.\n\n Equivalent to :math:`\\\\sqrt(left^2 + right^2)`, element-wise.\n Both inputs can be Symbol or scalar number. 
Broadcasting is not supported.\n\n Parameters\n ---------\n left : Symbol or scalar\n First leg of the triangle(s).\n right : Symbol or scalar\n Second leg of the triangle(s).\n\n Returns\n -------\n Symbol or scalar\n The hypotenuse of the triangle(s).\n\n Examples\n --------\n >>> mx.sym.hypot(3, 4)\n 5.0\n >>> x = mx.sym.Variable('x')\n >>> y = mx.sym.Variable('y')\n >>> z = mx.sym.hypot(x, 4)\n >>> z.eval(x=mx.nd.array([3,5,2]))[0].asnumpy()\n array([ 5., 6.40312433, 4.47213602], dtype=float32)\n >>> z = mx.sym.hypot(x, y)\n >>> z.eval(x=mx.nd.array([3,4]), y=mx.nd.array([10,2]))[0].asnumpy()\n array([ 10.44030666, 4.47213602], dtype=float32)\n \"\"\"\n if isinstance(left, Symbol) and isinstance(right, Symbol):\n return _internal._Hypot(left, right)\n if isinstance(left, Symbol) and isinstance(right, Number):\n return _internal._HypotScalar(left, scalar=right)\n if isinstance(left, Number) and isinstance(right, Symbol):\n return _internal._HypotScalar(right, scalar=left)\n if isinstance(left, Number) and isinstance(right, Number):\n return _numpy.hypot(left, right)\n else:\n raise TypeError('types (%s, %s) not supported' % (str(type(left)), str(type(right))))\n\ndef eye(N, M=0, k=0, dtype=None, **kwargs):\n \"\"\"Returns a new symbol of 2-D shape, filled with ones on the diagonal and zeros elsewhere.\n\n Parameters\n ----------\n N: int\n Number of rows in the output.\n M: int, optional\n Number of columns in the output. If 0, defaults to N.\n k: int, optional\n Index of the diagonal: 0 (the default) refers to the main diagonal,\n a positive value refers to an upper diagonal,\n and a negative value to a lower diagonal.\n dtype : str or numpy.dtype, optional\n The value type of the inner value, default to ``np.float32``.\n\n Returns\n -------\n out : Symbol\n The created Symbol.\n \"\"\"\n if dtype is None:\n dtype = _numpy.float32\n return _internal._eye(N, M, k, dtype=dtype, **kwargs)\n\ndef zeros(shape, dtype=None, **kwargs):\n \"\"\"Returns a new symbol of given shape and type, filled with zeros.\n\n Parameters\n ----------\n shape : int or sequence of ints\n Shape of the new array.\n dtype : str or numpy.dtype, optional\n The value type of the inner value, default to ``np.float32``.\n\n Returns\n -------\n out : Symbol\n The created Symbol.\n \"\"\"\n if dtype is None:\n dtype = _numpy.float32\n return _internal._zeros(shape=shape, dtype=dtype, **kwargs)\n\n\ndef ones(shape, dtype=None, **kwargs):\n \"\"\"Returns a new symbol of given shape and type, filled with ones.\n\n Parameters\n ----------\n shape : int or sequence of ints\n Shape of the new array.\n dtype : str or numpy.dtype, optional\n The value type of the inner value, default to ``np.float32``.\n\n Returns\n -------\n out : Symbol\n The created Symbol\n \"\"\"\n if dtype is None:\n dtype = _numpy.float32\n return _internal._ones(shape=shape, dtype=dtype, **kwargs)\n\n\ndef full(shape, val, dtype=None, **kwargs):\n \"\"\"Returns a new array of given shape and type, filled with the given value `val`.\n\n Parameters\n ----------\n shape : int or sequence of ints\n Shape of the new array.\n val : scalar\n Fill value.\n dtype : str or numpy.dtype, optional\n The value type of the inner value, default to ``np.float32``.\n\n Returns\n -------\n out : Symbol\n The created Symbol\n \"\"\"\n if dtype is None:\n dtype = _numpy.float32\n return _internal._full(shape=shape, dtype=dtype, value=float(val), **kwargs)\n\n# pylint: disable=redefined-outer-name\ndef arange(start, stop=None, step=1.0, repeat=1, name=None, dtype=None):\n 
\"\"\"Returns evenly spaced values within a given interval.\n\n Parameters\n ----------\n start : number\n Start of interval. The interval includes this value. The default start value is 0.\n stop : number, optional\n End of interval. The interval does not include this value.\n step : number, optional\n Spacing between values.\n repeat : int, optional\n \"The repeating time of all elements.\n E.g repeat=3, the element a will be repeated three times --> a, a, a.\n dtype : str or numpy.dtype, optional\n The value type of the inner value, default to ``np.float32``.\n\n Returns\n -------\n out : Symbol\n The created Symbol\n \"\"\"\n if dtype is None:\n dtype = _numpy.float32\n return _internal._arange(start=start, stop=stop, step=step, repeat=repeat,\n name=name, dtype=dtype)\n\ndef histogram(a, bins=10, range=None, **kwargs):\n \"\"\"Compute the histogram of the input data.\n\n Parameters\n ----------\n a : NDArray\n Input data. The histogram is computed over the flattened array.\n bins : int or sequence of scalars\n If bins is an int, it defines the number of equal-width bins in the\n given range (10, by default). If bins is a sequence, it defines the bin edges,\n including the rightmost edge, allowing for non-uniform bin widths.\n range : (float, float), required if bins is an integer\n The lower and upper range of the bins. If not provided, range is simply (a.min(), a.max()).\n Values outside the range are ignored. The first element of the range must be less than or\n equal to the second. range affects the automatic bin computation as well, the range will\n be equally divided by the number of bins.\n \"\"\"\n if isinstance(bins, Symbol):\n return _internal._histogram(data=a, bins=bins, **kwargs)\n elif isinstance(bins, integer_types):\n if range is None:\n raise ValueError(\"null range is not supported in symbol mode\")\n return _internal._histogram(data=a, bin_cnt=bins, range=range, **kwargs)\n raise ValueError(\"bins argument should be either an integer or an NDArray\")\n\n_set_symbol_class(Symbol)\n" ]
[ [ "numpy.prod", "numpy.hypot", "numpy.dtype" ] ]
RobinLu1209/PyTorch-Template
[ "1a444fa18ac4a4bf96c6a06edfa86021acdc558d" ]
[ "models/unet.py" ]
[ "import torch\nimport torch.nn.functional as F\nfrom torch import nn\nfrom configs.default_config import LIDCConfig as cfg\n\nfrom torch.nn import Conv3d\nclass Conv2_5d(nn.Conv3d):\n def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True):\n kernel_size = (1,kernel_size,kernel_size)\n padding = (0,padding,padding)\n super().__init__(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias)\n\n\nconv = locals()[cfg.GLOBAL_CONV]\n\nclass _EncoderBlock(nn.Module):\n def __init__(self, in_channels, out_channels, dropout=False):\n super(_EncoderBlock, self).__init__()\n layers = [\n conv(in_channels, out_channels, kernel_size=3, padding=1),\n nn.BatchNorm3d(out_channels),\n nn.ReLU(inplace=True),\n conv(out_channels, out_channels, kernel_size=3, padding=1),\n nn.BatchNorm3d(out_channels),\n nn.ReLU(inplace=True),\n ]\n if dropout:\n layers.append(nn.Dropout())\n layers.append(nn.MaxPool3d(kernel_size=2, stride=2))\n self.encode = nn.Sequential(*layers)\n\n def forward(self, x):\n return self.encode(x)\n\n\nclass _DecoderBlock(nn.Module):\n def __init__(self, in_channels, middle_channels, out_channels):\n super(_DecoderBlock, self).__init__()\n self.decode = nn.Sequential(\n conv(in_channels, middle_channels, kernel_size=3, padding=1),\n nn.BatchNorm3d(middle_channels),\n nn.ReLU(inplace=True),\n conv(middle_channels, middle_channels, kernel_size=3, padding=1),\n nn.BatchNorm3d(middle_channels),\n nn.ReLU(inplace=True),\n nn.Upsample(scale_factor=2),\n conv(middle_channels, out_channels, kernel_size=1)\n )\n\n def forward(self, x):\n return self.decode(x)\n\n\nclass UNet(nn.Module):\n def __init__(self, num_classes):\n super(UNet, self).__init__()\n self.enc1 = _EncoderBlock(3, cfg.channels[0])\n self.enc2 = _EncoderBlock(cfg.channels[0], cfg.channels[1])\n self.enc3 = _EncoderBlock(cfg.channels[1], cfg.channels[2])\n self.enc4 = _EncoderBlock(cfg.channels[2], cfg.channels[3])\n self.center = _DecoderBlock(cfg.channels[3], cfg.channels[3], cfg.channels[3])\n self.dec4 = _DecoderBlock(2*cfg.channels[3], cfg.channels[3], cfg.channels[2])\n# self.center = _DecoderBlock(cfg.channels[2], cfg.channels[3], cfg.channels[2])\n self.dec3 = _DecoderBlock(2*cfg.channels[2], cfg.channels[2], cfg.channels[1])\n self.dec2 = _DecoderBlock(2*cfg.channels[1], cfg.channels[1], cfg.channels[0])\n self.dec1 = nn.Sequential(\n conv(2*cfg.channels[0], cfg.channels[0], kernel_size=3, padding=1),\n nn.BatchNorm3d(cfg.channels[0]),\n nn.ReLU(inplace=True),\n conv(cfg.channels[0], cfg.channels[0], kernel_size=3, padding=1),\n nn.BatchNorm3d(cfg.channels[0]),\n nn.ReLU(inplace=True),\n )\n self.final = conv(cfg.channels[0], num_classes, kernel_size=1)\n\n def forward(self, x):\n enc1 = self.enc1(x)\n enc2 = self.enc2(enc1)\n enc3 = self.enc3(enc2)\n enc4 = self.enc4(enc3)\n center = self.center(enc4)\n dec4 = self.dec4(torch.cat([center, F.interpolate(enc4, center.size()[2:], mode='trilinear')], 1))\n dec3 = self.dec3(torch.cat([dec4, F.interpolate(enc3, dec4.size()[2:], mode='trilinear')], 1))\n dec2 = self.dec2(torch.cat([dec3, F.interpolate(enc2, dec3.size()[2:], mode='trilinear')], 1))\n dec1 = self.dec1(torch.cat([dec2, F.interpolate(enc1, dec2.size()[2:], mode='trilinear')], 1))\n final = self.final(dec1)\n return F.interpolate(final, x.size()[2:], mode='trilinear')\n " ]
[ [ "torch.nn.Dropout", "torch.nn.Sequential", "torch.nn.MaxPool3d", "torch.nn.Upsample", "torch.nn.ReLU", "torch.nn.BatchNorm3d" ] ]
orcasound/orca-autoencoder
[ "8e66c9b45c74d642db0d8c93add75586cf5b6871" ]
[ "CNN/ferlitschAutoenc.py" ]
[ "# Copyright 2019 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# AutoEncoder - Pooling with Dense Layers/Hidden Units\n\nimport tensorflow as tf\nfrom tensorflow.keras import Model, Input\nfrom tensorflow.keras.layers import Dense\nfrom tensorflow.keras.layers import Conv2D, Conv2DTranspose\nfrom tensorflow.keras.layers import ReLU, BatchNormalization, Reshape, Flatten\n\n\ndef encoderDense(x, layers):\n ''' Construct the Encoder\n x : input to the encoder\n layers: number of nodes per layer\n '''\n\n # Flatten the input image\n x = Flatten()(x)\n\n # Progressive Unit Pooling\n for layer in layers:\n n_nodes = layer['n_nodes']\n x = Dense(n_nodes)(x)\n x = BatchNormalization()(x)\n x = ReLU()(x)\n\n # The Encoding\n return x\n\n\ndef decoderDense(x, layers, input_shape):\n ''' Construct the Decoder\n x : input to the decoder\n layers: nodes per layer\n '''\n\n # Progressive Unit Unpooling\n for _ in range(len(layers) - 1, 0, -1):\n n_nodes = layers[_]['n_nodes']\n x = Dense(n_nodes)(x)\n x = BatchNormalization()(x)\n x = ReLU()(x)\n\n # Last unpooling and match shape to input\n units = input_shape[0] * input_shape[1] * input_shape[2]\n x = Dense(units, activation='sigmoid')(x)\n\n # Reshape back into an image\n outputs = Reshape(input_shape)(x)\n\n # The decoded image\n return outputs\n\n\ndef encoderCNN(inputs, layers):\n \"\"\" Construct the Encoder\n inputs : the input vector\n layers : number of filters per layer\n \"\"\"\n x = inputs\n\n # Feature pooling by 1/2H x 1/2W\n for n_filters in layers:\n x = Conv2D(n_filters, (3, 3), strides=(2, 2), padding='same', use_bias=False, kernel_initializer='he_normal')(x)\n x = BatchNormalization()(x)\n x = ReLU()(x)\n\n return x\n\n\ndef decoderCNN(x, layers):\n \"\"\" Construct the Decoder\n x : input to decoder\n layers : the number of filters per layer (in encoder)\n \"\"\"\n\n # Feature unpooling by 2H x 2W\n for _ in range(len(layers) - 1, 0, -1):\n n_filters = layers[_]\n x = Conv2DTranspose(n_filters, (3, 3), strides=(2, 2), padding='same', use_bias=False,\n kernel_initializer='he_normal')(x)\n x = BatchNormalization()(x)\n x = ReLU()(x)\n\n # Last unpooling, restore number of channels\n x = Conv2DTranspose(1, (3, 3), strides=(2, 2), padding='same', use_bias=False, kernel_initializer='he_normal')(x)\n x = BatchNormalization()(x)\n x = ReLU()(x)\n\n return x\n\ndef exampleDense():\n ''' Example for constructing/training an AutoEncoder model on MNIST\n '''\n # Example of constructing an AutoEncoder\n # metaparameter: number of filters per layer\n layers = [{'n_nodes': 256}, {'n_nodes': 128}, {'n_nodes': 64}]\n\n inputs = Input((28, 28, 1))\n _encoder = encoderDense(inputs, layers)\n outputs = decoderDense(_encoder, layers, (28, 28, 1))\n ae = Model(inputs, outputs)\n\n ae.summary()\n\n from tensorflow.keras.datasets import mnist\n import numpy as np\n (x_train, y_train), (x_test, y_test) = mnist.load_data()\n x_train = (x_train / 255.0).astype(np.float32)\n x_test = (x_test / 255.0).astype(np.float32)\n x_train = 
np.expand_dims(x_train, axis=-1)\n x_test = np.expand_dims(x_test, axis=-1)\n\n ae.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n ae.fit(x_train, x_train, epochs=10, batch_size=32, validation_split=0.1, verbose=1)\n ae.evaluate(x_test, x_test)\n\ndef exampleCNN():\n # metaparameter: number of filters per layer in encoder\n layers = [64, 32, 32]\n # The input tensor\n inputs = Input(shape=(28, 28, 1))\n # The encoder\n x = encoderCNN(inputs, layers)\n # The decoder\n outputs = decoderCNN(x, layers)\n # Instantiate the Model\n ae = Model(inputs, outputs)\n from tensorflow.keras.datasets import mnist\n import numpy as np\n (x_train, y_train), (x_test, y_test) = mnist.load_data()\n x_train = (x_train / 255.0).astype(np.float32)\n x_test = (x_test / 255.0).astype(np.float32)\n x_train = np.expand_dims(x_train, axis=-1)\n x_test = np.expand_dims(x_test, axis=-1)\n\n ae.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n ae.fit(x_train, x_train, epochs=10, batch_size=32, validation_split=0.1, verbose=1)\n ae.evaluate(x_test, x_test)\n\n\n#exampleDense()\n\nexampleCNN()\n" ]
[ [ "tensorflow.keras.datasets.mnist.load_data", "tensorflow.keras.layers.Flatten", "tensorflow.keras.layers.Reshape", "tensorflow.keras.layers.Dense", "tensorflow.keras.layers.Conv2D", "tensorflow.keras.Model", "tensorflow.keras.layers.Conv2DTranspose", "tensorflow.keras.Input", "tensorflow.keras.layers.BatchNormalization", "tensorflow.keras.layers.ReLU", "numpy.expand_dims" ] ]
felix-wt/DynamicHandPose
[ "2fef3ab17627f5bd517f7baa6d555a7bab0b657e" ]
[ "main.py" ]
[ "import os\nimport sys\nimport json\nimport numpy as np\nimport torch\nfrom torch import nn\nfrom torch import optim\nfrom torch.optim import lr_scheduler\n\nfrom opts import parse_opts_offline\nfrom model import generate_model\nfrom mean import get_mean, get_std\nfrom spatial_transforms import *\nfrom temporal_transforms import *\nfrom target_transforms import ClassLabel, VideoID\nfrom target_transforms import Compose as TargetCompose\nfrom dataset import get_training_set, get_validation_set, get_test_set\nfrom utils import Logger\nfrom train import train_epoch\nfrom validation import val_epoch\nimport test\nimport pdb\nimport shutil\n\ndef save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):\n torch.save(state, '%s/%s_checkpoint.pth' % (opt.result_path, opt.store_name))\n if is_best:\n shutil.copyfile('%s/%s_checkpoint.pth' % (opt.result_path, opt.store_name),'%s/%s_best.pth' % (opt.result_path, opt.store_name))\n\ndef adjust_learning_rate(optimizer, epoch, lr_steps):\n \"\"\"Sets the learning rate to the initial LR decayed by 10 every 30 epochs\n 论文中说明 lr 会随epoch发升变化\"\"\"\n lr_new = opt.learning_rate * (0.1 ** (sum(epoch >= np.array(lr_steps))))\n for param_group in optimizer.param_groups: #\n param_group['lr'] = lr_new\n\nbest_prec1 = 0\n\nif __name__ == '__main__':\n opt = parse_opts_offline()\n if opt.root_path != '':\n # Join some given paths with root path \n if opt.result_path:\n opt.result_path = os.path.join(opt.root_path, opt.result_path)\n if opt.annotation_path:\n opt.annotation_path = os.path.join(opt.root_path, opt.annotation_path)\n if opt.resume_path:\n opt.resume_path = os.path.join(opt.root_path, opt.resume_path)\n if opt.pretrain_path:\n opt.pretrain_path = os.path.join(opt.root_path, opt.pretrain_path)\n if opt.video_path:\n opt.video_path = os.path.join(opt.root_path, opt.video_path)\n \n opt.scales = [opt.initial_scale]\n for i in range(1, opt.n_scales):\n opt.scales.append(opt.scales[-1] * opt.scale_step)\n opt.arch = '{}-{}'.format(opt.model, opt.model_depth)\n opt.mean = get_mean(opt.norm_value)\n opt.std = get_std(opt.norm_value)\n print(opt)\n with open(os.path.join(opt.result_path, 'opts.json'), 'w') as opt_file:\n json.dump(vars(opt), opt_file)\n\n torch.manual_seed(opt.manual_seed)\n\n model, parameters = generate_model(opt)\n print(model)\n\n pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n print(\"Total number of trainable parameters: \", pytorch_total_params)\n\n # Define Class weights\n if opt.weighted:\n print(\"Weighted Loss is created\")\n if opt.n_finetune_classes == 2:\n weight = torch.tensor([1.0, 3.0])\n else:\n weight = torch.ones(opt.n_finetune_classes)\n else:\n weight = None\n\n\n criterion = nn.CrossEntropyLoss()\n if not opt.no_cuda:\n criterion = criterion.cuda()\n\n if opt.no_mean_norm and not opt.std_norm:\n norm_method = Normalize([0, 0, 0], [1, 1, 1])\n elif not opt.std_norm:\n norm_method = Normalize(opt.mean, [1, 1, 1])\n else:\n norm_method = Normalize(opt.mean, opt.std)\n\n if not opt.no_train:\n assert opt.train_crop in ['random', 'corner', 'center']\n if opt.train_crop == 'random':\n crop_method = MultiScaleRandomCrop(opt.scales, opt.sample_size)\n elif opt.train_crop == 'corner':\n crop_method = MultiScaleCornerCrop(opt.scales, opt.sample_size)\n elif opt.train_crop == 'center':\n crop_method = MultiScaleCornerCrop(\n opt.scales, opt.sample_size, crop_positions=['c'])\n spatial_transform = Compose([\n crop_method,\n SpatialElasticDisplacement(),\n ToTensor(opt.norm_value), 
norm_method\n ])\n temporal_transform = Compose([\n TemporalRandomCrop(opt.sample_duration)\n ])\n target_transform = ClassLabel()\n training_data = get_training_set(opt, spatial_transform,\n temporal_transform, target_transform)\n \n train_loader = torch.utils.data.DataLoader(\n training_data,\n batch_size=opt.batch_size,\n shuffle=True,\n num_workers=opt.n_threads,\n pin_memory=True)\n train_logger = Logger(\n os.path.join(opt.result_path, 'train.log'),\n ['epoch', 'loss', 'acc', 'precision','recall','lr'])\n train_batch_logger = Logger(\n os.path.join(opt.result_path, 'train_batch.log'),\n ['epoch', 'batch', 'iter', 'loss', 'acc', 'precision', 'recall', 'lr'])\n\n if opt.nesterov:\n dampening = 0\n else:\n dampening = opt.dampening\n optimizer = optim.SGD(\n parameters,\n lr=opt.learning_rate,\n momentum=opt.momentum,\n dampening=dampening,\n weight_decay=opt.weight_decay,\n nesterov=opt.nesterov)\n # scheduler = lr_scheduler.ReduceLROnPlateau(\n # optimizer, 'min', patience=opt.lr_patience)\n if not opt.no_val:\n spatial_transform = Compose([\n Scale(opt.sample_size),\n CenterCrop(opt.sample_size),\n ToTensor(opt.norm_value), norm_method\n ])\n temporal_transform = Compose([\n TemporalCenterCrop(opt.sample_duration)\n ])\n target_transform = ClassLabel()\n validation_data = get_validation_set(\n opt, spatial_transform, temporal_transform, target_transform)\n val_loader = torch.utils.data.DataLoader(\n validation_data,\n batch_size=opt.batch_size,\n shuffle=False,\n num_workers=opt.n_threads,\n pin_memory=True)\n val_logger = Logger(\n os.path.join(opt.result_path, 'val.log'), \n ['epoch', 'loss', 'acc','precision', 'recall'])\n\n if opt.resume_path:\n print('loading checkpoint {}'.format(opt.resume_path))\n checkpoint = torch.load(opt.resume_path)\n assert opt.arch == checkpoint['arch']\n\n opt.begin_epoch = checkpoint['epoch']\n model.load_state_dict(checkpoint['state_dict'])\n if not opt.no_train:\n optimizer.load_state_dict(checkpoint['optimizer'])\n\n print('run')\n for i in range(opt.begin_epoch, opt.n_epochs + 1):\n if not opt.no_train:\n adjust_learning_rate(optimizer, i, opt.lr_steps)\n train_epoch(i, train_loader, model, criterion, optimizer, opt,\n train_logger, train_batch_logger)\n if not opt.no_val:\n validation_loss, prec1 = val_epoch(i, val_loader, model, criterion, opt,\n val_logger)\n is_best = prec1 > best_prec1\n best_prec1 = max(prec1, best_prec1)\n state = {\n 'epoch': i,\n 'arch': opt.arch,\n 'state_dict': model.state_dict(),\n 'optimizer': optimizer.state_dict(),\n 'best_prec1': best_prec1\n }\n save_checkpoint(state, is_best)\n\n # if not opt.no_train and not opt.no_val:\n # scheduler.step(validation_loss)\n\n if opt.test:\n spatial_transform = Compose([\n Scale(opt.sample_size),\n CenterCrop(opt.sample_size),\n ToTensor(opt.norm_value), norm_method\n ])\n temporal_transform = Compose([\n TemporalCenterCrop(opt.sample_duration)\n ])\n target_transform = ClassLabel()\n\n test_data = get_test_set(opt, spatial_transform, temporal_transform,\n target_transform)\n test_loader = torch.utils.data.DataLoader(\n test_data,\n batch_size=opt.batch_size,\n shuffle=False,\n num_workers=opt.n_threads,\n pin_memory=True)\n test.test(test_loader, model, opt, test_data.class_names)\n" ]
[ [ "numpy.array", "torch.save", "torch.optim.SGD", "torch.ones", "torch.manual_seed", "torch.tensor", "torch.utils.data.DataLoader", "torch.load", "torch.nn.CrossEntropyLoss" ] ]
nicolas-racchi/digit_recognizer
[ "e9bf7bcb587ca110128ac8106a178ad88a7966c4" ]
[ "flask_3/model/load.py" ]
[ "import tensorflow as tf \nfrom scipy.misc import imread, imresize, imshow\nfrom keras.models import model_from_json\nimport keras.models\nimport numpy as np \nimport re \nimport os\nimport base64\n\n\n\t# CARICAMENTO MODELLO\ndef init():\n\tjson_file = open('model/model.json', 'r')\n\tloaded_model_json = json_file.read()\n\tjson_file.close()\n\tloaded_model = model_from_json(loaded_model_json)\n\tloaded_model.load_weights(\"model/model.h5\")\n\tprint(\"Loaded model from disk\")\n\n\tloaded_model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n\tgraph = tf.get_default_graph()\n\n\treturn loaded_model, graph\n\n\n\t# PREPARAZIONE MODELLO\ndef convertImage(imgData1):\n\timgstring = re.search(b'base64,(.*)',imgData1).group(1)\n\twith open('output.png', 'wb') as output:\n\t\toutput.write(base64.b64decode(imgstring))\n\t\tprint(\"Binary conversion completed\")\n\n\ndef preprocess():\n\tx = imread('output.png', mode='L')\n\tx = np.invert(x)\n\tx = imresize(x, (28, 28))\n\tx = x.reshape(1, 28, 28, 1)\n\tprint(\"Preprocessing completed\")\n\treturn x" ]
[ [ "scipy.misc.imread", "numpy.invert", "scipy.misc.imresize", "tensorflow.get_default_graph" ] ]
data301-2020-winter2/course-project-group_1024
[ "e93f12b30010bb1d448917761eed7ccedfcca506" ]
[ "analysis/scripts/.ipynb_checkpoints/project_functions-checkpoint.py" ]
[ "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n\ndef load_and_process(path):\n\n # Method Chain 1 (Load data and deal with missing data)\n df1 = (\n pd.read_csv(path)\n .loc[lambda x: ~x['Marital_Status'].str.contains(\"Unknown\", na=False)]\n .loc[lambda x: ~x['Income_Category'].str.contains(\"Unknown\", na=False)]\n .loc[lambda x: ~x['Education_Level'].str.contains(\"Unknown\", na=False)]\n .reset_index()\n .drop('index', axis = 1)\n )\n\n\n # Method Chain 2 (Create new columns, drop others, and do processing)\n df2 = (df1\n .rename(columns = {'Card_Category':'Card Type','Customer_Age':'Age','Income_Category':'Income','Credit_Limit':'Credit Limit','Education_Level':'Education','Months_Inactive_12_mon':'Months Inactive','Total_Relationship_Count':'Relationship Count'})\n .drop(columns=['Naive_Bayes_Classifier_Attrition_Flag_Card_Category_Contacts_Count_12_mon_Dependent_count_Education_Level_Months_Inactive_12_mon_1','Naive_Bayes_Classifier_Attrition_Flag_Card_Category_Contacts_Count_12_mon_Dependent_count_Education_Level_Months_Inactive_12_mon_2','Total_Ct_Chng_Q4_Q1','Total_Amt_Chng_Q4_Q1'])\n .assign(Utilization_Rate=lambda x: x['Avg_Utilization_Ratio']*100) \n )\n \n return df2\n" ]
[ [ "pandas.read_csv" ] ]
AustralianConservationFoundation/gnaf-loader
[ "3ff645fbdcbc191106bf0242bfadbc87bc4acc15" ]
[ "testing/weather/xx_process_weather_rain.py" ]
[ "# script gets all Australian BoM weather station observations\r\n# ... and applies an interpolated temperature to all GNAF points in a 100m grid\r\n\r\n# TODO:\r\n# 1. remove temperature biases due to altitude differences\r\n# a. Add SRTM altitudes to GNAF\r\n# b. Add interpolated altitude from weather stations to GNAF\r\n# c. adjust where the difference is > 100m\r\n# 2. generate temps outside the weather station network to catch the ~3,100 GNAF points outside the interpolated area\r\n#\r\n\r\nimport geopandas\r\nimport io\r\nimport json\r\nimport logging\r\nimport matplotlib.pyplot as plt\r\nimport multiprocessing\r\nimport numpy\r\nimport os\r\nimport pandas\r\nimport psycopg2\r\nimport requests\r\nimport scipy.interpolate\r\nimport sqlalchemy\r\nimport struct\r\nimport urllib.request\r\nimport zipfile\r\n\r\nfrom bs4 import BeautifulSoup\r\nfrom datetime import datetime\r\nfrom osgeo import gdal\r\n\r\n# where to save the files\r\noutput_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"data\")\r\n\r\n# states to include (note: no \"ACT\" or \"OT\" state, Antarctica is part of TAS in BoM observations)\r\nstates = [{\"name\": \"NSW\", \"product\": \"IDN60801\"},\r\n {\"name\": \"NT\", \"product\": \"IDD60801\"},\r\n {\"name\": \"QLD\", \"product\": \"IDQ60801\"},\r\n {\"name\": \"SA\", \"product\": \"IDS60801\"},\r\n {\"name\": \"TAS\", \"product\": \"IDT60801\"},\r\n {\"name\": \"VIC\", \"product\": \"IDV60801\"},\r\n {\"name\": \"WA\", \"product\": \"IDW60801\"},\r\n {\"name\": \"ANT\", \"product\": \"IDT60801\"}]\r\n\r\n# urls for each state's weather observations\r\nbase_url = \"http://www.bom.gov.au/{0}/observations/{0}all.shtml\"\r\n\r\n# postgres connect strings\r\npg_connect_string = \"dbname='geo' host='localhost' port='5432' user='postgres' password='password'\"\r\nsql_alchemy_engine_string = \"postgresql+psycopg2://postgres:password@localhost/geo\"\r\n\r\n\r\ndef main():\r\n start_time = datetime.now()\r\n\r\n # connect to Postgres\r\n try:\r\n pg_conn = psycopg2.connect(pg_connect_string)\r\n pg_conn.autocommit = True\r\n pg_cur = pg_conn.cursor()\r\n except psycopg2.Error:\r\n logger.fatal(\"Unable to connect to database\\nACTION: Check your Postgres parameters and/or database security\")\r\n return False\r\n\r\n # download weather stations\r\n station_list = get_weather_stations()\r\n logger.info(\"Downloaded {:,} weather stations : {}\".format(len(station_list), datetime.now() - start_time))\r\n\r\n obs_list = get_weather_observations(station_list)\r\n logger.info(\"Downloaded {:,} latest observations : {}\".format(len(obs_list), datetime.now() - start_time))\r\n start_time = datetime.now()\r\n\r\n # create dataframe of weather stations\r\n station_df = pandas.DataFrame(station_list)\r\n\r\n # create dataframe of weather obs\r\n obs_df = pandas.DataFrame(obs_list).drop_duplicates()\r\n\r\n # merge data and add points to dataframe\r\n df = (obs_df.merge(station_df, on=\"wmo\")\r\n .drop([\"lat\", \"lon\"], axis=1)\r\n )\r\n # gdf = geopandas.GeoDataFrame(df, geometry=geopandas.points_from_xy(df.longitude, df.latitude), crs=\"EPSG:4283\")\r\n\r\n # replace all missing values (\"-\") with NaN and change type of field of interest from string\r\n df2 = df.replace('-', numpy.nan).astype({'rain_trace': 'float64'})\r\n\r\n # select rows from the last hour with valid data\r\n rain_trace_df = df2[(df2[\"utc_time_diff\"] < 3600.0) & (df2[\"rain_trace\"].notna())\r\n & (df2[\"longitude\"] > 112.0) & (df2[\"longitude\"] < 162.0)\r\n & (df2[\"latitude\"] > -45.0) & 
(df2[\"latitude\"] < -8.0)]\r\n\r\n # # testing - get histogram of observation time\r\n # rain_trace_df.hist(\"utc_time\")\r\n # plt.savefig(os.path.join(output_path, \"histogram.png\"), dpi=300, facecolor=\"w\", pad_inches=0.0, metadata=None)\r\n\r\n # export dataframe to PostGIS\r\n export_dataframe(pg_cur, rain_trace_df, \"testing\", \"weather_stations\", \"replace\")\r\n logger.info(\"Exported weather station dataframe to PostGIS: {}\".format(datetime.now() - start_time))\r\n start_time = datetime.now()\r\n\r\n # # save to disk for debugging\r\n # rain_trace_df.to_feather(os.path.join(output_path \"temp_df.ipc\"))\r\n\r\n # # load from disk if debugging\r\n # temp_df = pandas.read_feather(os.path.join(output_path \"temp_df.ipc\"))\r\n\r\n # extract lat, long and air temp as arrays\r\n x = rain_trace_df[\"longitude\"].to_numpy()\r\n y = rain_trace_df[\"latitude\"].to_numpy()\r\n z = rain_trace_df[\"rain_trace\"].to_numpy()\r\n h = rain_trace_df[\"altitude\"].to_numpy()\r\n\r\n logger.info(\"Filtered observations dataframe with weather station coordinates : {} rows : {}\"\r\n .format(len(rain_trace_df.index), datetime.now() - start_time))\r\n start_time = datetime.now()\r\n\r\n # # open SRTM 3 second DEM of Australia (ESRI Binary Grid format)\r\n # dem_file_name = \"/Users/hugh.saalmans/Downloads/3secSRTM_DEM/DEM_ESRI_GRID_16bit_Integer/dem3s_int/hdr.adf\"\r\n # dem_dataset = gdal.Open(dem_file_name, gdal.GA_ReadOnly)\r\n # dem_geotransform = dem_dataset.GetGeoTransform()\r\n #\r\n # # get DEM origin point and pixel size to create numpy arrays from\r\n # dem_num_x, dem_num_y = dem_dataset.RasterXSize, dem_dataset.RasterYSize\r\n # dem_origin_x, dem_origin_y = dem_geotransform[0], dem_geotransform[3]\r\n # dem_origin_delta_x, dem_origin_delta_y = dem_geotransform[1], dem_geotransform[5]\r\n\r\n # select GNAF coordinates - group by 3 decimal places to create a ~100m grid of addresses\r\n # sql = \"\"\"SELECT latitude::numeric(5,3) as latitude, longitude::numeric(6,3) as longitude, count(*) as address_count\r\n # FROM gnaf_202105.address_principals\r\n # GROUP BY latitude::numeric(5,3), longitude::numeric(6,3)\"\"\"\r\n # sql = \"\"\"SELECT * FROM testing.gnaf_points_with_pop_and_height\"\"\"\r\n # gnaf_df = pandas.read_sql_query(sql, pg_conn)\r\n #\r\n # # save to feather file for future use (GNAF only changes once every 3 months)\r\n # gnaf_df.to_feather(os.path.join(output_path, \"gnaf.ipc\"))\r\n\r\n # load from feather file\r\n gnaf_df = pandas.read_feather(os.path.join(output_path, \"gnaf.ipc\"))\r\n\r\n gnaf_x = gnaf_df[\"longitude\"].to_numpy()\r\n gnaf_y = gnaf_df[\"latitude\"].to_numpy()\r\n gnaf_counts = gnaf_df[\"count\"].to_numpy()\r\n # gnaf_dem_elevation = gnaf_df[\"elevation\"].to_numpy()\r\n\r\n logger.info(\"Loaded {:,} GNAF points : {}\".format(len(gnaf_df.index), datetime.now() - start_time))\r\n start_time = datetime.now()\r\n\r\n # # interpolate temperatures for GNAF coordinates\r\n gnaf_points = numpy.array((gnaf_x.flatten(), gnaf_y.flatten())).T\r\n gnaf_temps = scipy.interpolate.griddata((x, y), z, gnaf_points, method=\"linear\")\r\n gnaf_weather_elevation = scipy.interpolate.griddata((x, y), h, gnaf_points, method=\"linear\")\r\n\r\n # create results dataframe\r\n temperature_df = pandas.DataFrame({\"latitude\": gnaf_y, \"longitude\": gnaf_x,\r\n \"count\": gnaf_counts, \"rain_trace\": gnaf_temps})\r\n # temperature_df = pandas.DataFrame({\"latitude\": gnaf_y, \"longitude\": gnaf_x,\r\n # \"count\": gnaf_counts, \"dem_elevation\": gnaf_dem_elevation,\r\n # 
\"weather_elevation\": gnaf_weather_elevation, \"rain_trace\": gnaf_temps})\r\n\r\n # # add temperatures adjusted for altitude differences between GNAF point and nearby weather stations\r\n # temperature_df[\"adjusted_temp\"] = temperature_df[\"rain_trace\"] + \\\r\n # (temperature_df[\"weather_elevation\"] - temperature_df[\"dem_elevation\"]) / 150.0\r\n\r\n # print(temperature_df)\r\n\r\n # get count of rows with a temperature\r\n row_count = len(temperature_df[temperature_df[\"rain_trace\"].notna()].index)\r\n\r\n logger.info(\"Got {:,} interpolated temperatures and elevations for GNAF points : {}\"\r\n .format(row_count, datetime.now() - start_time))\r\n start_time = datetime.now()\r\n\r\n # # plot a map of gnaf points by temperature\r\n # temperature_df.plot.scatter(\"longitude\", \"latitude\", c=\"rain_trace\", colormap=\"jet\")\r\n # plt.axis(\"off\")\r\n # plt.savefig(os.path.join(output_path, \"interpolated.png\"), dpi=300, facecolor=\"w\", pad_inches=0.0, metadata=None)\r\n #\r\n # logger.info(\"Plotted points to PNG file : {}\".format(datetime.now() - start_time))\r\n # start_time = datetime.now()\r\n\r\n # export dataframe to PostGIS\r\n export_dataframe(pg_cur, temperature_df, \"testing\", \"gnaf_temperature\", \"replace\")\r\n logger.info(\"Exported GNAF temperature dataframe to PostGIS: {}\".format(datetime.now() - start_time))\r\n # start_time = datetime.now()\r\n\r\n return True\r\n\r\n\r\ndef export_dataframe(pg_cur, df, schema_name, table_name, export_mode):\r\n # create geodataframe\r\n gdf = geopandas.GeoDataFrame(df, geometry=geopandas.points_from_xy(df.longitude, df.latitude), crs=\"EPSG:4283\")\r\n\r\n # export to GeoPackage\r\n # gdf.to_file(os.path.join(output_path, \"{}.gpkg\".format(table_name)), driver=\"GPKG\")\r\n #\r\n # logger.info(\"Exported points to GeoPackage : {}\".format(datetime.now() - start_time))\r\n # start_time = datetime.now()\r\n\r\n # export to PostGIS\r\n engine = sqlalchemy.create_engine(sql_alchemy_engine_string)\r\n gdf.to_postgis(table_name, engine, schema=schema_name, if_exists=export_mode)\r\n\r\n pg_cur.execute(\"ANALYSE {}.{}\".format(schema_name, table_name))\r\n # pg_cur.execute(\"ALTER TABLE testing.weather_stations ADD CONSTRAINT weather_stations_pkey PRIMARY KEY (wmo)\"\r\n # .format(schema_name, table_name))\r\n pg_cur.execute(\"ALTER TABLE {0}.{1} CLUSTER ON idx_{1}_geometry\".format(schema_name, table_name))\r\n pg_cur.execute(\"ALTER TABLE {}.{} RENAME COLUMN geometry TO geom\".format(schema_name, table_name))\r\n\r\n\r\ndef get_weather_observations(station_list):\r\n start_time = datetime.now()\r\n\r\n obs_urls = list()\r\n obs_list = list()\r\n for state in states:\r\n # get URL for web page to scrape\r\n input_url = base_url.format(state[\"name\"].lower())\r\n\r\n # load and parse web page\r\n r = requests.get(input_url)\r\n soup = BeautifulSoup(r.content, features=\"html.parser\")\r\n\r\n # get all links\r\n links = soup.find_all(\"a\", href=True)\r\n\r\n for link in links:\r\n url = link[\"href\"]\r\n\r\n if \"/products/\" in url:\r\n # only include weather station observations in their home state (border weather obs are duplicated)\r\n for station in station_list:\r\n if station[\"state\"] == state[\"name\"] and station[\"wmo\"] == int(url.split(\".\")[1]):\r\n # change URL to get JSON file of weather obs and add to list\r\n obs_url = url.replace(\"/products/\", \"http://www.bom.gov.au/fwo/\").replace(\".shtml\", \".json\")\r\n obs_urls.append(obs_url)\r\n\r\n # with open(os.path.join(output_path, 
\"weather_observations_urls.txt\"), \"w\", newline=\"\") as output_file:\r\n # output_file.write(\"\\n\".join(obs_urls))\r\n\r\n logger.info(\"\\t - {} : got obs file list : {}\".format(state[\"name\"], datetime.now() - start_time))\r\n start_time = datetime.now()\r\n\r\n # download each obs file using multiprocessing\r\n pool = multiprocessing.Pool(processes=16)\r\n results = pool.imap_unordered(run_multiprocessing, obs_urls)\r\n\r\n pool.close()\r\n pool.join()\r\n\r\n for result in list(results):\r\n if result.get(\"error\") is not None:\r\n logger.warning(\"\\t- Failed to parse {}\".format(result[\"error\"]))\r\n else:\r\n obs_list.append(result)\r\n\r\n return obs_list\r\n\r\n\r\ndef get_weather_stations():\r\n # get weather stations - obs have poor coordinates\r\n response = urllib.request.urlopen(\"ftp://ftp.bom.gov.au/anon2/home/ncc/metadata/sitelists/stations.zip\")\r\n data = io.BytesIO(response.read())\r\n station_file = zipfile.ZipFile(data, \"r\", zipfile.ZIP_DEFLATED).read(\"stations.txt\").decode(\"utf-8\")\r\n stations = station_file.split(\"\\r\\n\")\r\n\r\n station_list = list()\r\n\r\n # split fixed width file and get the fields we want\r\n field_widths = (-8, -6, 41, -8, -7, 9, 10, -15, 4, 11, -9, 7) # negative widths represent ignored fields\r\n format_string = \" \".join(\"{}{}\".format(abs(fw), \"x\" if fw < 0 else \"s\") for fw in field_widths)\r\n field_struct = struct.Struct(format_string)\r\n parser = field_struct.unpack_from\r\n # print(\"fmtstring: {!r}, recsize: {} chars\".format(fmtstring, fieldstruct.size))\r\n\r\n # skip first 5 rows (lazy coding!)\r\n stations.pop(0)\r\n stations.pop(0)\r\n stations.pop(0)\r\n stations.pop(0)\r\n stations.pop(0)\r\n\r\n # add each station to a list of dictionaries\r\n for station in stations:\r\n if len(station) > 128:\r\n fields = parser(bytes(station, \"utf-8\"))\r\n\r\n # convert to list\r\n field_list = list()\r\n\r\n for field in fields:\r\n field_list.append(field.decode(\"utf-8\").lstrip().rstrip())\r\n\r\n if field_list[5] != \"..\":\r\n station_dict = dict()\r\n station_dict[\"name\"] = field_list[0]\r\n station_dict[\"latitude\"] = float(field_list[1])\r\n station_dict[\"longitude\"] = float(field_list[2])\r\n station_dict[\"state\"] = field_list[3]\r\n if field_list[4] != \"..\":\r\n station_dict[\"altitude\"] = float(field_list[4])\r\n station_dict[\"wmo\"] = int(field_list[5])\r\n\r\n station_list.append(station_dict)\r\n\r\n return station_list\r\n\r\n\r\ndef run_multiprocessing(url):\r\n # file_path = os.path.join(output_path, \"obs\", url.split(\"/\")[-1])\r\n\r\n # try:\r\n obs_text = requests.get(url).text\r\n\r\n # with open(file_path, \"w\", newline=\"\") as output_file:\r\n # output_file.write(obs_text)\r\n\r\n obs_json = json.loads(obs_text)\r\n obs_list = obs_json[\"observations\"][\"data\"]\r\n\r\n try:\r\n # default is an error for when there are no observations\r\n result = dict()\r\n result[\"error\"] = \"{} : No observations\".format(url)\r\n\r\n for obs in obs_list:\r\n if obs[\"sort_order\"] == 0:\r\n result = obs\r\n\r\n # add utc time\r\n obs[\"utc_time\"] = datetime.strptime(obs[\"aifstime_utc\"], \"%Y%m%d%H%M%S\")\r\n obs[\"utc_time_diff\"] = (datetime.utcnow() - obs[\"utc_time\"]).total_seconds()\r\n\r\n except Exception as ex:\r\n result = dict()\r\n result[\"error\"] = \"{} : {}\".format(url, ex)\r\n # print(result)\r\n\r\n return result\r\n\r\n\r\nif __name__ == \"__main__\":\r\n full_start_time = datetime.now()\r\n\r\n logger = logging.getLogger()\r\n\r\n # set logger\r\n log_file = 
os.path.abspath(__file__).replace(\".py\", \".log\")\r\n logging.basicConfig(filename=log_file, level=logging.DEBUG, format=\"%(asctime)s %(message)s\",\r\n datefmt=\"%m/%d/%Y %I:%M:%S %p\")\r\n\r\n # setup logger to write to screen as well as writing to log file\r\n # define a Handler which writes INFO messages or higher to the sys.stderr\r\n console = logging.StreamHandler()\r\n console.setLevel(logging.INFO)\r\n # set a format which is simpler for console use\r\n formatter = logging.Formatter(\"%(name)-12s: %(levelname)-8s %(message)s\")\r\n # tell the handler to use this format\r\n console.setFormatter(formatter)\r\n # add the handler to the root logger\r\n logging.getLogger(\"\").addHandler(console)\r\n\r\n logger.info(\"\")\r\n logger.info(\"Start weather obs download\")\r\n # geoscape.check_python_version(logger)\r\n\r\n if main():\r\n logger.info(\"Finished successfully! : {}\".format(datetime.now() - full_start_time))\r\n else:\r\n logger.fatal(\"Something bad happened!\")\r\n\r\n logger.info(\"\")\r\n logger.info(\"-------------------------------------------------------------------------------\")\r\n" ]
[ [ "pandas.DataFrame" ] ]
syrte/halotools
[ "73ecde657373deb61fc19cb16ec5be7f10df697d", "73ecde657373deb61fc19cb16ec5be7f10df697d" ]
[ "halotools/sim_manager/user_supplied_ptcl_catalog.py", "halotools/mock_observables/pairwise_velocities/velocity_marked_npairs_xy_z.py" ]
[ "\"\"\" Module containing the UserSuppliedPtclCatalog class.\n\"\"\"\nimport numpy as np\nimport os\nfrom warnings import warn\nimport datetime\n\nfrom astropy.table import Table\n\ntry:\n import h5py\nexcept ImportError:\n warn(\"Most of the functionality of the sim_manager \"\n \"sub-package requires h5py to be installed,\\n\"\n \"which can be accomplished either with pip or conda\")\n\nfrom .ptcl_table_cache import PtclTableCache\nfrom .ptcl_table_cache_log_entry import PtclTableCacheLogEntry\nfrom .halo_table_cache_log_entry import get_redshift_string\n\nfrom ..utils.array_utils import custom_len\nfrom ..custom_exceptions import HalotoolsError\n\n__all__ = ('UserSuppliedPtclCatalog', )\n\n\nclass UserSuppliedPtclCatalog(object):\n \"\"\" Class used to transform a user-provided particle catalog\n into the standard form recognized by Halotools.\n\n Random downsamplings of dark matter particles are not especially useful\n catalogs in their own right. So primary purpose of this class\n is the `add_ptclcat_to_cache` method,\n which sets you up to use the dark matter particle collection\n together with the associated halo catalog.\n\n See :ref:`working_with_alternative_particle_data` for a tutorial on this class.\n\n \"\"\"\n\n def __init__(self, **kwargs):\n \"\"\"\n\n Parameters\n ------------\n **metadata : float or string\n Keyword arguments storing catalog metadata.\n The quantities `Lbox` and `particle_mass`\n are required and must be in Mpc/h and Msun/h units, respectively.\n `redshift` is also required metadata.\n See Examples section for further notes.\n\n **ptcl_catalog_columns : sequence of arrays\n Sequence of length-*Nptcls* arrays passed in as keyword arguments.\n\n Each key will be the column name attached to the input array.\n At a minimum, there must be columns ``x``, ``y`` and ``z``.\n See Examples section for further notes.\n\n Examples\n ----------\n Here is an example using dummy data to show how to create a new `UserSuppliedPtclCatalog`\n and store it in cache for future use with the associated halo catalog.\n First the setup:\n\n >>> redshift = 0.0\n >>> Lbox = 250.\n >>> particle_mass = 1e9\n >>> num_ptcls = int(1e4)\n >>> x = np.random.uniform(0, Lbox, num_ptcls)\n >>> y = np.random.uniform(0, Lbox, num_ptcls)\n >>> z = np.random.uniform(0, Lbox, num_ptcls)\n >>> ptcl_ids = np.arange(0, num_ptcls)\n >>> vx = np.random.uniform(-100, 100, num_ptcls)\n >>> vy = np.random.uniform(-100, 100, num_ptcls)\n >>> vz = np.random.uniform(-100, 100, num_ptcls)\n\n Now we simply pass in both the metadata and the particle catalog columns as keyword arguments:\n\n >>> ptcl_catalog = UserSuppliedPtclCatalog(redshift=redshift, Lbox=Lbox, particle_mass=particle_mass, x=x, y=y, z=z, vx=vx, vy=vy, vz=vz, ptcl_ids=ptcl_ids)\n\n Take note: it is important that the value of the input ``redshift`` matches\n whatever the redshift is of the associated halo catalog. 
Your ``redshift``\n should be accurate to four decimal places.\n\n Now that we have built a Halotools-formatted particle catalog, we can add it to the cache as follows.\n\n First choose a relatively permanent location on disk where you will be storing the particle data:\n\n >>> my_fname = 'some_fname.hdf5'\n\n Next choose the ``simname`` that matches the ``simname`` of the associated halo catalog, for example:\n\n >>> my_simname = 'bolplanck'\n\n Now choose any version name that will help you keep track of\n potentially different version of the same catalog of particles.\n\n >>> my_version_name = 'any version name'\n\n Finally, give a short, plain-language descriptions of how\n you obtained your collection of particles:\n\n >>> my_processing_notes = 'This particle catalog was obtained through the following means: ...'\n\n Now we add the particle catalog to cache using the following syntax:\n\n >>> ptcl_catalog.add_ptclcat_to_cache(my_fname, my_simname, my_version_name, my_processing_notes) # doctest: +SKIP\n\n Your particle catalog has now been cached and is accessible whenever\n you load the associated halo catalog into memory. For example:\n\n >>> from halotools.sim_manager import CachedHaloCatalog\n >>> halocat = CachedHaloCatalog(simname=my_simname, halo_finder='some halo-finder', version_name='some version-name', redshift=redshift, ptcl_version_name=my_version_name) # doctest: +SKIP\n\n Note the arguments passed to the `~halotools.sim_manager.CachedHaloCatalog` class.\n The ``version_name`` here refers to the *halos*, not the particles.\n When loading the `~halotools.sim_manager.CachedHaloCatalog`,\n you specify the version name of the particles\n with the ``ptcl_version_name`` keyword argument.\n The ``ptcl_version_name`` need not agree with the ``version_name`` of the associated halos.\n This allows halo and particle catalogs to evolve independently over time.\n In fact, for cases where you have supplied your own particles, it is *strongly* recommended\n that you choose a version name for your particles that differs from the version name\n that Halotools uses for its catalogs. 
This will help avoid future confusion over\n where the cached particle catalog came from.\n\n The particle catalog itself is stored in the ``ptcl_table`` attribute,\n with columns accessed as follows:\n\n >>> array_of_x_positions = halocat.ptcl_table['x'] # doctest: +SKIP\n\n If you do not wish to store your particle catalog in cache,\n see the :ref:`using_user_supplied_ptcl_catalog_without_the_cache` section\n of the :ref:`working_with_alternative_particle_data` tutorial.\n\n \"\"\"\n\n ptcl_table_dict, metadata_dict = self._parse_constructor_kwargs(**kwargs)\n self.ptcl_table = Table(ptcl_table_dict)\n\n self._test_metadata_dict(**metadata_dict)\n\n # make Lbox a 3-vector\n _Lbox = metadata_dict.pop('Lbox')\n metadata_dict['Lbox'] = np.empty(3)\n metadata_dict['Lbox'][:] = _Lbox\n\n for key, value in metadata_dict.items():\n setattr(self, key, value)\n\n def _parse_constructor_kwargs(self, **kwargs):\n \"\"\"\n \"\"\"\n try:\n x = kwargs['x']\n assert type(x) is np.ndarray\n y = kwargs['y']\n assert type(y) is np.ndarray\n z = kwargs['z']\n assert type(z) is np.ndarray\n\n Nptcls = custom_len(x)\n assert Nptcls >= 1e4\n assert Nptcls == len(y)\n assert Nptcls == len(z)\n except (KeyError, AssertionError):\n # catch missing keys and failed type/length checks alike\n msg = (\"\\nThe UserSuppliedPtclCatalog requires ``x``, ``y`` and \"\n \"``z`` keyword arguments,\\n each of which must store an \"\n \"ndarray of the same length Nptcls >= 1e4.\\n\")\n\n raise HalotoolsError(msg)\n\n ptcl_table_dict = (\n {key: kwargs[key] for key in kwargs\n if (type(kwargs[key]) is np.ndarray) and\n (custom_len(kwargs[key]) == Nptcls)})\n\n metadata_dict = (\n {key: kwargs[key] for key in kwargs if key not in ptcl_table_dict})\n\n return ptcl_table_dict, metadata_dict\n\n def _test_metadata_dict(self, **metadata_dict):\n\n try:\n assert 'Lbox' in metadata_dict\n assert custom_len(metadata_dict['Lbox']) in [1,3]\n except AssertionError:\n msg = (\"\\nThe UserSuppliedPtclCatalog requires keyword argument \"\n \"``Lbox``, storing either a scalar or 3-vector.\\n\")\n raise HalotoolsError(msg)\n\n try:\n assert 'particle_mass' in metadata_dict\n assert custom_len(metadata_dict['particle_mass']) == 1\n assert 'redshift' in metadata_dict\n except AssertionError:\n msg = (\"\\nThe UserSuppliedPtclCatalog requires keyword arguments \"\n \"``particle_mass`` and ``redshift``\\n\"\n \"storing scalars that will be interpreted as metadata \"\n \"about the particle catalog.\\n\")\n raise HalotoolsError(msg)\n\n Lbox = np.empty(3)\n Lbox[:] = metadata_dict['Lbox']\n assert (Lbox > 0).all(), \"``Lbox`` must be positive\"\n\n try:\n x, y, z = (\n self.ptcl_table['x'],\n self.ptcl_table['y'],\n self.ptcl_table['z'])\n\n assert np.all(x >= 0)\n assert np.all(x <= Lbox[0])\n assert np.all(y >= 0)\n assert np.all(y <= Lbox[1])\n assert np.all(z >= 0)\n assert np.all(z <= Lbox[2])\n except AssertionError:\n msg = (\"The ``x``, ``y`` and ``z`` columns must only store \"\n \"arrays\\n that are bound by 0 and the input ``Lbox``.
\\n\")\n raise HalotoolsError(msg)\n\n try:\n redshift = float(metadata_dict['redshift'])\n except (TypeError, ValueError):\n msg = (\"\\nThe ``redshift`` metadata must be a float.\\n\")\n raise HalotoolsError(msg)\n\n def add_ptclcat_to_cache(self, fname, simname, version_name,\n processing_notes, overwrite=False):\n\n \"\"\"\n Parameters\n ------------\n fname : string\n Absolute path of the file to be stored in cache.\n Must conclude with an `.hdf5` extension.\n\n simname : string\n Nickname of the simulation used as a shorthand way to keep track\n of the catalogs in your cache.\n\n version_name : string\n Nickname of the version of the particle catalog.\n The ``version_name`` is used as a bookkeeping tool in the cache log.\n As described in the `~halotools.sim_manager.UserSuppliedPtclCatalog` docstring,\n the version name selected here need not match the version name\n of the associated halo catalog.\n\n processing_notes : string\n String used to provide supplementary notes that will be attached to\n the hdf5 file storing your particle data.\n\n overwrite : bool, optional\n If the chosen ``fname`` already exists, then you must set ``overwrite``\n to True in order to write the file to disk. Default is False.\n\n \"\"\"\n\n ############################################################\n # Perform some consistency checks on the fname\n if os.path.isfile(fname) and not overwrite:\n msg = (\"\\nYou attempted to store your particle catalog \"\n \"in the following location: \\n\\n\" + str(fname) +\n \"\\n\\nThis path points to an existing file. \\n\"\n \"Either choose a different fname or set ``overwrite`` to True.\\n\")\n raise HalotoolsError(msg)\n\n try:\n dirname = os.path.dirname(fname)\n assert os.path.exists(dirname)\n except AssertionError:\n msg = (\"\\nThe directory in which you are trying to store the file does not exist.
\\n\")\n raise HalotoolsError(msg)\n\n if fname[-5:] != '.hdf5':\n msg = (\"\\nThe fname must end with an ``.hdf5`` extension.\\n\")\n raise HalotoolsError(msg)\n\n ############################################################\n # Perform consistency checks on the remaining log entry attributes\n try:\n _ = str(simname)\n _ = str(version_name)\n _ = str(processing_notes)\n except Exception:\n msg = (\"\\nThe input ``simname``, ``version_name`` \"\n \"and ``processing_notes``\\nmust all be strings.\")\n raise HalotoolsError(msg)\n\n ############################################################\n # Now write the file to disk and add the appropriate metadata\n\n self.ptcl_table.write(fname, path='data', overwrite=overwrite)\n\n # Open in append mode: h5py 3+ opens files read-only by default,\n # which would make the attrs.create calls below fail\n f = h5py.File(fname, 'a')\n\n redshift_string = get_redshift_string(self.redshift)\n\n f.attrs.create('simname', np.string_(simname))\n f.attrs.create('version_name', np.string_(version_name))\n f.attrs.create('redshift', np.string_(redshift_string))\n f.attrs.create('fname', np.string_(fname))\n\n f.attrs.create('Lbox', self.Lbox)\n f.attrs.create('particle_mass', self.particle_mass)\n\n time_right_now = datetime.datetime.now().strftime(\n '%Y-%m-%d %H:%M:%S')\n f.attrs.create('time_catalog_was_originally_cached', np.string_(time_right_now))\n\n f.attrs.create('processing_notes', np.string_(processing_notes))\n\n f.close()\n\n ############################################################\n # Now that the file is on disk, add it to the cache\n cache = PtclTableCache()\n\n log_entry = PtclTableCacheLogEntry(\n simname=simname, version_name=version_name,\n redshift=self.redshift, fname=fname)\n\n cache.add_entry_to_cache_log(log_entry, update_ascii=True)\n self.log_entry = log_entry\n", "from __future__ import (absolute_import, division, print_function, unicode_literals)\nimport numpy as np\nimport multiprocessing\nfrom functools import partial\n\nfrom ..pair_counters.npairs_xy_z import _npairs_xy_z_process_args\nfrom ..pair_counters.mesh_helpers import _set_approximate_cell_sizes, _cell1_parallelization_indices\nfrom ..pair_counters.rectangular_mesh import RectangularDoubleMesh\nfrom .velocity_marked_npairs_3d import (\n _func_signature_int_from_vel_weight_func_id, _velocity_marked_npairs_3d_process_weights)\nfrom .engines import velocity_marked_npairs_xy_z_engine\n\n__author__ = ('Duncan Campbell', 'Andrew Hearin')\n\n\n__all__ = ('velocity_marked_npairs_xy_z', )\n\n\ndef velocity_marked_npairs_xy_z(sample1, sample2, rp_bins, pi_bins, period=None,\n weights1=None, weights2=None,\n weight_func_id=1, verbose=False, num_threads=1,\n approx_cell1_size=None, approx_cell2_size=None):\n r\"\"\"\n Calculate the number of velocity weighted pairs\n with separations greater than or equal to\n :math:`r_{\\perp}` and :math:`r_{\\parallel}`, :math:`W(>r_{\\perp},>r_{\\parallel})`.\n\n :math:`r_{\\perp}` and :math:`r_{\\parallel}` are defined wrt the z-direction.\n\n The weight given to each pair is determined by the weights for a pair,\n :math:`w_1`, :math:`w_2`, and a user-specified \"velocity weighting function\", indicated\n by the ``weight_func_id`` parameter, :math:`f(w_1,w_2)`.\n\n Parameters\n ----------\n sample1 : array_like\n Npts1 x 3 numpy array containing 3-D positions of points.\n See the :ref:`mock_obs_pos_formatting` documentation page, or the\n Examples section below, for instructions on how to transform\n your coordinate position arrays into the\n format accepted by the ``sample1`` and ``sample2`` arguments.\n Length units are comoving and assumed to be in Mpc/h, here and throughout
Halotools.\n\n sample2 : array_like\n Npts2 x 3 array containing 3-D positions of points.\n\n rp_bins : array_like\n array of boundaries defining the radial bins perpendicular to the LOS in which\n pairs are counted.\n Length units are comoving and assumed to be in Mpc/h, here and throughout Halotools.\n\n pi_bins : array_like\n array of boundaries defining the radial bins parallel to the LOS in which\n pairs are counted.\n Length units are comoving and assumed to be in Mpc/h, here and throughout Halotools.\n\n period : array_like, optional\n Length-3 sequence defining the periodic boundary conditions\n in each dimension. If you instead provide a single scalar, Lbox,\n period is assumed to be the same in all Cartesian directions.\n If set to None (the default option), PBCs are set to infinity.\n Length units are comoving and assumed to be in Mpc/h, here and throughout Halotools.\n\n weights1 : array_like, optional\n Either a 1-D array of length *N1*, or a 2-D array of length *N1* x *N_weights*,\n containing the weights used for the weighted pair counts. If this parameter is\n None, the weights are set to np.ones(*(N1,N_weights)*).\n\n weights2 : array_like, optional\n Either a 1-D array of length *N2*, or a 2-D array of length *N2* x *N_weights*,\n containing the weights used for the weighted pair counts. If this parameter is\n None, the weights are set to np.ones(*(N2,N_weights)*).\n\n weight_func_id : int, optional\n velocity weighting function integer ID. Each weighting function requires a specific\n number of weights per point, *N_weights*. See the Notes for a description of\n available weighting functions.\n\n verbose : Boolean, optional\n If True, print out information and progress.\n\n num_threads : int, optional\n Number of threads to use in calculation, where parallelization is performed\n using the python ``multiprocessing`` module. Default is 1 for a purely serial\n calculation, in which case a multiprocessing Pool object will\n never be instantiated. A string 'max' may be used to indicate that\n the pair counters should use all available cores on the machine.\n\n approx_cell1_size : array_like, optional\n Length-3 array serving as a guess for the optimal manner by which points\n will be apportioned into subvolumes of the simulation box.\n The optimum choice unavoidably depends on the specs of your machine.\n Default choice is to use Lbox/10 in each dimension,\n which will give reasonable performance for most use-cases.\n Performance can vary sensitively with this parameter, so it is highly\n recommended that you experiment with this parameter when carrying out\n performance-critical calculations.\n\n approx_cell2_size : array_like, optional\n Analogous to ``approx_cell1_size``, but for sample2. See comments for\n ``approx_cell1_size`` for details.\n\n Returns\n -------\n w1N_pairs : numpy.array\n 2-D array of shape *(Nrp_bins,Npi_bins)* containing the weighted number counts\n of pairs. The exact values depend on ``weight_func_id``\n (which weighting function was chosen).\n\n w2N_pairs : numpy.array\n 2-D array of shape *(Nrp_bins,Npi_bins)* containing the weighted number counts\n of pairs. The exact values depend on ``weight_func_id``\n (which weighting function was chosen).\n\n w3N_pairs : numpy.array\n 2-D array of shape *(Nrp_bins,Npi_bins)* containing the weighted number counts\n of pairs.
The exact values depend on ``weight_func_id``\n (which weighting function was chosen).\n\n Examples\n --------\n For demonstration purposes we will work with\n halos in the `~halotools.sim_manager.FakeSim`.\n\n >>> from halotools.sim_manager import FakeSim\n >>> halocat = FakeSim()\n\n >>> x = halocat.halo_table['halo_x']\n >>> y = halocat.halo_table['halo_y']\n >>> z = halocat.halo_table['halo_z']\n\n We transform our *x, y, z* points into the array shape used by the pair-counter by\n taking the transpose of the result of `numpy.vstack`. This boilerplate transformation\n is used throughout the `~halotools.mock_observables` sub-package:\n\n >>> sample1 = np.vstack((x,y,z)).T\n\n We do the same with the velocities, stacking them together with the positions\n to build the array of weights:\n\n >>> vx = halocat.halo_table['halo_vx']\n >>> vy = halocat.halo_table['halo_vy']\n >>> vz = halocat.halo_table['halo_vz']\n >>> velocities = np.vstack((x,y,z,vx,vy,vz)).T\n\n >>> rp_bins = np.logspace(-2,-1,10)\n >>> pi_bins = np.linspace(0, 10, 5)\n >>> result = velocity_marked_npairs_xy_z(sample1, sample1, rp_bins, pi_bins, period=halocat.Lbox, weights1=velocities, weights2=velocities)\n\n \"\"\"\n result = _npairs_xy_z_process_args(sample1, sample2, rp_bins, pi_bins, period,\n verbose, num_threads, approx_cell1_size, approx_cell2_size)\n x1in, y1in, z1in, x2in, y2in, z2in = result[0:6]\n rp_bins, pi_bins, period, num_threads, PBCs, approx_cell1_size, approx_cell2_size = result[6:]\n xperiod, yperiod, zperiod = period\n\n rp_max = np.max(rp_bins)\n pi_max = np.max(pi_bins)\n search_xlength, search_ylength, search_zlength = rp_max, rp_max, pi_max\n\n # Process the input weights with the helper function\n weights1, weights2 = (\n _velocity_marked_npairs_3d_process_weights(sample1, sample2,\n weights1, weights2, weight_func_id))\n\n # Compute the estimates for the cell sizes\n approx_cell1_size, approx_cell2_size = (\n _set_approximate_cell_sizes(approx_cell1_size, approx_cell2_size, period)\n )\n approx_x1cell_size, approx_y1cell_size, approx_z1cell_size = approx_cell1_size\n approx_x2cell_size, approx_y2cell_size, approx_z2cell_size = approx_cell2_size\n\n # Build the rectangular mesh\n double_mesh = RectangularDoubleMesh(x1in, y1in, z1in, x2in, y2in, z2in,\n approx_x1cell_size, approx_y1cell_size, approx_z1cell_size,\n approx_x2cell_size, approx_y2cell_size, approx_z2cell_size,\n search_xlength, search_ylength, search_zlength, xperiod, yperiod, zperiod, PBCs)\n\n # Create a function object that has a single argument, for parallelization purposes\n engine = partial(velocity_marked_npairs_xy_z_engine, double_mesh,\n x1in, y1in, z1in, x2in, y2in, z2in,\n weights1, weights2, weight_func_id, rp_bins, pi_bins)\n\n # Calculate the cell1 indices that will be looped over by the engine\n num_threads, cell1_tuples = _cell1_parallelization_indices(\n double_mesh.mesh1.ncells, num_threads)\n\n if num_threads > 1:\n pool = multiprocessing.Pool(num_threads)\n result = np.array(pool.map(engine, cell1_tuples))\n counts1, counts2, counts3 = result[:, 0], result[:, 1], result[:, 2]\n counts1 = np.sum(counts1, axis=0)\n counts2 = np.sum(counts2, axis=0)\n counts3 = np.sum(counts3, axis=0)\n pool.close()\n else:\n counts1, counts2, counts3 = np.array(engine(cell1_tuples[0]))\n\n return counts1, counts2, counts3\n" ]
[ [ "numpy.all", "numpy.empty", "numpy.string_" ], [ "numpy.max", "numpy.sum" ] ]
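A minimal sketch of the parallelization pattern used by velocity_marked_npairs_xy_z above, assuming nothing beyond the standard library and NumPy: bind all shared arguments into a single-argument callable with functools.partial, map it over chunks of cell indices with a multiprocessing Pool, then reduce the per-chunk counts. The helper name _engine and the chunking are illustrative stand-ins, not halotools API.

import multiprocessing
from functools import partial

import numpy as np


def _engine(shared_data, cell_indices):
    # Stand-in for velocity_marked_npairs_xy_z_engine: one count per chunk of cells
    return shared_data[list(cell_indices)].sum()


if __name__ == '__main__':
    data = np.arange(100)
    engine = partial(_engine, data)          # single-argument callable, as in the source
    chunks = [range(0, 50), range(50, 100)]  # analogous to cell1_tuples
    with multiprocessing.Pool(2) as pool:
        counts = sum(pool.map(engine, chunks))  # aggregate per-chunk results
    assert counts == data.sum()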
hehuanlin123/DeepLearning
[ "6b7feabbbde9ac9489f76da4c06eeb6703fb165a" ]
[ "Kaggle/Playgroud/RiskPrediction/Home-Credit-Default-Risk-master/py/trash/803_cv_lgb.py" ]
[ "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Jul 17 11:15:57 2018\n\n@author: kazuki.onodera\n\"\"\"\n\nimport gc, os\nfrom tqdm import tqdm\nimport pandas as pd\nimport sys\nsys.path.append(f'/home/{os.environ.get(\"USER\")}/PythonLibrary')\nimport lgbextension as ex\nimport lightgbm as lgb\nfrom multiprocessing import cpu_count, Pool\n#from glob import glob\nimport count\nimport utils, utils_cat\nutils.start(__file__)\n#==============================================================================\n\nSEED = 71\n\nHEADS = list(range(500, 2500, 100))\n\nparam = {\n 'objective': 'binary',\n 'metric': 'auc',\n 'learning_rate': 0.01,\n 'max_depth': 6,\n 'num_leaves': 63,\n 'max_bin': 255,\n \n 'min_child_weight': 10,\n 'min_data_in_leaf': 150,\n 'reg_lambda': 0.5, # L2 regularization term on weights.\n 'reg_alpha': 0.5, # L1 regularization term on weights.\n \n 'colsample_bytree': 0.9,\n 'subsample': 0.9,\n# 'nthread': 32,\n 'nthread': cpu_count(),\n 'bagging_freq': 1,\n 'verbose':-1,\n 'seed': SEED\n }\n\n# =============================================================================\n# load\n# =============================================================================\nimp = pd.read_csv('LOG/imp_801_imp_lgb.py.csv')\nimp['split'] /= imp['split'].max()\nimp['gain'] /= imp['gain'].max()\nimp['total'] = imp['split'] + imp['gain']\nimp.sort_values('total', ascending=False, inplace=True)\n\n\nfor HEAD in HEADS:\n imp_ = imp[~imp.feature.str.startswith('Mxw_META_FEATURE_')]\n use_files = (imp_.head(HEAD).feature + '.f').tolist()\n \n files = utils.get_use_files(use_files, True)\n \n X = pd.concat([\n pd.read_feather(f) for f in tqdm(files, mininterval=60)\n ], axis=1)\n y = utils.read_pickles('../data/label').TARGET\n \n \n if X.columns.duplicated().sum()>0:\n raise Exception(f'duplicated!: { X.columns[X.columns.duplicated()] }')\n print('no dup :) ')\n print(f'X.shape {X.shape}')\n \n gc.collect()\n \n CAT = list( set(X.columns)&set(utils_cat.ALL))\n \n # =============================================================================\n # cv\n # =============================================================================\n dtrain = lgb.Dataset(X, y, categorical_feature=CAT )\n gc.collect()\n \n ret = lgb.cv(param, dtrain, 9999, nfold=5,\n early_stopping_rounds=100, verbose_eval=50,\n seed=SEED)\n \n result = f\"CV auc-mean({HEAD}): {ret['auc-mean'][-1]}\"\n print(result)\n \n utils.send_line(result)\n\n\n# =============================================================================\n# train\n# =============================================================================\n#dtrain = lgb.Dataset(X, y, categorical_feature=CAT )\n##model = lgb.train(param, dtrain, len(ret['auc-mean']))\n#model = lgb.train(param, dtrain, 1000)\n#imp = ex.getImp(model).sort_values(['gain', 'feature'], ascending=[False, True])\n#\n#\n#imp.to_csv(f'LOG/imp_{__file__}.csv', index=False)\n#\n#\"\"\"\n#imp = pd.read_csv('LOG/imp_909_cv.py.csv')\n#\"\"\"\n\n#def multi_touch(arg):\n# os.system(f'touch \"../feature_unused/{arg}.f\"')\n#\n#\n#col = imp[imp['split']==0]['feature'].tolist()\n#pool = Pool(cpu_count())\n#pool.map(multi_touch, col)\n#pool.close()\n\n#==============================================================================\nutils.end(__file__)\n#utils.stop_instance()\n\n\n" ]
[ [ "pandas.read_csv", "pandas.read_feather" ] ]
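A hedged sketch of the lgb.cv call pattern the script above relies on, run here on synthetic data; it assumes the pre-4.0 LightGBM API used by the script, where cv() accepts early_stopping_rounds directly and returns a dict keyed 'auc-mean'.

import numpy as np
import lightgbm as lgb

rng = np.random.RandomState(71)
X = rng.rand(500, 10)
y = (X[:, 0] > 0.5).astype(int)  # toy binary target

dtrain = lgb.Dataset(X, y)
param = {'objective': 'binary', 'metric': 'auc', 'learning_rate': 0.1, 'verbose': -1}
ret = lgb.cv(param, dtrain, 200, nfold=5, early_stopping_rounds=20, seed=71)
print('CV auc-mean:', ret['auc-mean'][-1])  # last entry = score at the best iteration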
bnmajor/hexrdgui
[ "d19f7cf4a4469b0d3b6978f2f65c5e8a6bd81785" ]
[ "hexrd/ui/calibration/panel_buffer_dialog.py" ]
[ "import os\n\nfrom PySide2.QtCore import Signal, QObject, QSignalBlocker\nfrom PySide2.QtWidgets import QFileDialog, QMessageBox\nimport numpy as np\n\nfrom hexrd.ui import enter_key_filter\n\nfrom hexrd.ui.hexrd_config import HexrdConfig\nfrom hexrd.ui.ui_loader import UiLoader\n\nCONFIG_MODE_BORDER = 'border'\nCONFIG_MODE_NUMPY = 'numpy'\n\n\nclass PanelBufferDialog(QObject):\n\n accepted = Signal()\n rejected = Signal()\n finished = Signal(int)\n\n def __init__(self, detector, parent=None):\n super().__init__(parent)\n\n self.detector = detector\n loader = UiLoader()\n self.ui = loader.load_file('panel_buffer_dialog.ui')\n self.ui.installEventFilter(enter_key_filter)\n\n # Hide the tab bar. It gets selected by changes to the combo box.\n self.ui.tab_widget.tabBar().hide()\n self.setup_combo_box_data()\n\n self.update_gui()\n\n self.setup_connections()\n\n def setup_connections(self):\n self.ui.select_file_button.pressed.connect(self.select_file)\n self.ui.config_mode.currentIndexChanged.connect(self.update_mode_tab)\n self.ui.accepted.connect(self.on_accepted)\n self.ui.rejected.connect(self.on_rejected)\n\n def setup_combo_box_data(self):\n item_data = [\n CONFIG_MODE_BORDER,\n CONFIG_MODE_NUMPY\n ]\n for i, data in enumerate(item_data):\n self.ui.config_mode.setItemData(i, data)\n\n def show(self):\n self.ui.show()\n\n def on_accepted(self):\n if self.mode == CONFIG_MODE_NUMPY and self.file_name == '':\n msg = 'Please select a NumPy array file'\n QMessageBox.critical(self.ui, 'HEXRD', msg)\n self.show()\n return\n\n if self.update_config():\n self.accepted.emit()\n self.finished.emit(self.ui.result())\n\n def on_rejected(self):\n self.rejected.emit()\n self.finished.emit(self.ui.result())\n\n def select_file(self):\n selected_file, selected_filter = QFileDialog.getOpenFileName(\n self.ui, 'Load Panel Buffer', HexrdConfig().working_dir,\n 'NPY files (*.npy)')\n\n if selected_file:\n HexrdConfig().working_dir = os.path.dirname(selected_file)\n self.ui.file_name.setText(selected_file)\n\n @property\n def file_name(self):\n return self.ui.file_name.text()\n\n @property\n def x_border(self):\n return self.ui.border_x_spinbox.value()\n\n @property\n def y_border(self):\n return self.ui.border_y_spinbox.value()\n\n @property\n def widgets(self):\n return [\n self.ui.file_name,\n self.ui.border_x_spinbox,\n self.ui.border_y_spinbox\n ]\n\n def update_config(self):\n # Set the new config options on the internal config\n config = HexrdConfig().config\n detector_config = config['instrument']['detectors'][self.detector]\n\n buffer_default = {'status': 0}\n buffer = detector_config.setdefault('buffer', buffer_default)\n if self.mode == CONFIG_MODE_BORDER:\n buffer['value'] = [self.x_border, self.y_border]\n else:\n array = np.load(self.file_name)\n\n # Must match the detector size\n detector_shape = (detector_config['pixels']['columns']['value'],\n detector_config['pixels']['rows']['value'])\n if array.shape != detector_shape:\n msg = 'The NumPy array shape must match the detector'\n QMessageBox.critical(self.ui, 'HEXRD', msg)\n self.show()\n return False\n\n buffer['value'] = array\n\n return True\n\n def update_gui(self):\n blockers = [QSignalBlocker(x) for x in self.widgets] # noqa: F841\n\n config = HexrdConfig().config\n detector_config = config['instrument']['detectors'][self.detector]\n\n if 'buffer' in detector_config:\n buffer = detector_config['buffer']['value']\n\n if isinstance(buffer, np.ndarray):\n self.mode = CONFIG_MODE_NUMPY\n else:\n self.mode = CONFIG_MODE_BORDER\n if 
np.isscalar(buffer):\n buffer = [buffer]*2\n\n self.ui.border_x_spinbox.setValue(buffer[0])\n self.ui.border_y_spinbox.setValue(buffer[1])\n\n self.update_mode_tab()\n\n @property\n def mode(self):\n return self.ui.config_mode.currentData()\n\n @mode.setter\n def mode(self, v):\n w = self.ui.config_mode\n for i in range(w.count()):\n if v == w.itemData(i):\n w.setCurrentIndex(i)\n return\n\n raise Exception(f'Unable to set config mode: {v}')\n\n def update_mode_tab(self):\n mode_tab = getattr(self.ui, self.mode + '_tab')\n self.ui.tab_widget.setCurrentWidget(mode_tab)\n" ]
[ [ "numpy.load", "numpy.isscalar" ] ]
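A hedged sketch of the buffer handling split across update_gui/update_config above: a scalar border is broadcast to an [x, y] pair, while an ndarray buffer must match the detector shape. normalize_buffer is an illustrative helper for this sketch, not part of hexrdgui.

import numpy as np


def normalize_buffer(buffer, detector_shape):
    if isinstance(buffer, np.ndarray):
        if buffer.shape != detector_shape:
            raise ValueError('The NumPy array shape must match the detector')
        return buffer
    if np.isscalar(buffer):
        buffer = [buffer] * 2  # same border in x and y, as in update_gui
    return list(buffer)


print(normalize_buffer(3, (10, 12)))                         # -> [3, 3]
print(normalize_buffer(np.zeros((10, 12)), (10, 12)).shape)  # -> (10, 12)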
pratik2508/Tacotron-Indian-English
[ "d3e4bf46c1da1c0e10918618662ef8175983b886" ]
[ "datasets/datafeeder.py" ]
[ "import numpy as np\nimport os\nimport random\nimport tensorflow as tf\nimport threading\nimport time\nimport traceback\nfrom text import cmudict, text_to_sequence\nfrom util.infolog import log\n\n\n_batches_per_group = 32\n_p_cmudict = 0.5\n_pad = 0\n\n\nclass DataFeeder(threading.Thread):\n '''Feeds batches of data into a queue on a background thread.'''\n\n def __init__(self, coordinator, metadata_filename, hparams):\n super(DataFeeder, self).__init__()\n self._coord = coordinator\n self._hparams = hparams\n self._cleaner_names = [x.strip() for x in hparams.cleaners.split(',')]\n self._offset = 0\n\n # Load metadata:\n self._datadir = os.path.dirname(metadata_filename)\n with open(metadata_filename, encoding='utf-8') as f:\n self._metadata = [line.strip().split('|') for line in f]\n hours = sum((int(x[2]) for x in self._metadata)) * hparams.frame_shift_ms / (3600 * 1000)\n log('Loaded metadata for %d examples (%.2f hours)' % (len(self._metadata), hours))\n\n # Create placeholders for inputs and targets. Don't specify batch size because we want to\n # be able to feed different sized batches at eval time.\n self._placeholders = [\n tf.placeholder(tf.int32, [None, None], 'inputs'),\n tf.placeholder(tf.int32, [None], 'input_lengths'),\n tf.placeholder(tf.float32, [None, None, hparams.num_mels], 'mel_targets'),\n tf.placeholder(tf.float32, [None, None, hparams.num_freq], 'linear_targets')\n ]\n\n # Create queue for buffering data:\n queue = tf.FIFOQueue(8, [tf.int32, tf.int32, tf.float32, tf.float32], name='input_queue')\n self._enqueue_op = queue.enqueue(self._placeholders)\n self.inputs, self.input_lengths, self.mel_targets, self.linear_targets = queue.dequeue()\n self.inputs.set_shape(self._placeholders[0].shape)\n self.input_lengths.set_shape(self._placeholders[1].shape)\n self.mel_targets.set_shape(self._placeholders[2].shape)\n self.linear_targets.set_shape(self._placeholders[3].shape)\n\n # Load CMUDict: If enabled, this will randomly substitute some words in the training data with\n # their ARPABet equivalents, which will allow you to also pass ARPABet to the model for\n # synthesis (useful for proper nouns, etc.)\n if hparams.use_cmudict:\n cmudict_path = os.path.join(self._datadir, 'cmudict-0.7b')\n if not os.path.isfile(cmudict_path):\n raise Exception('If use_cmudict=True, you must download ' +\n 'http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b to %s' % cmudict_path)\n self._cmudict = cmudict.CMUDict(cmudict_path, keep_ambiguous=True)\n log('Loaded CMUDict with %d unambiguous entries' % len(self._cmudict))\n else:\n self._cmudict = None\n\n\n def start_in_session(self, session):\n self._session = session\n self.start()\n\n\n def run(self):\n try:\n while not self._coord.should_stop():\n self._enqueue_next_group()\n except Exception as e:\n traceback.print_exc()\n self._coord.request_stop(e)\n\n\n def _enqueue_next_group(self):\n start = time.time()\n\n # Read a group of examples:\n n = self._hparams.batch_size\n r = self._hparams.outputs_per_step\n examples = [self._get_next_example() for i in range(n * _batches_per_group)]\n\n # Bucket examples based on similar output sequence length for efficiency:\n examples.sort(key=lambda x: x[-1])\n batches = [examples[i:i+n] for i in range(0, len(examples), n)]\n random.shuffle(batches)\n\n log('Generated %d batches of size %d in %.03f sec' % (len(batches), n, time.time() - start))\n for batch in batches:\n feed_dict = dict(zip(self._placeholders, _prepare_batch(batch, r)))\n self._session.run(self._enqueue_op, 
feed_dict=feed_dict)\n\n\n def _get_next_example(self):\n '''Loads a single example (input, mel_target, linear_target, cost) from disk'''\n if self._offset >= len(self._metadata):\n self._offset = 0\n random.shuffle(self._metadata)\n meta = self._metadata[self._offset]\n self._offset += 1\n\n text = meta[3]\n if self._cmudict and random.random() < _p_cmudict:\n text = ' '.join([self._maybe_get_arpabet(word) for word in text.split(' ')])\n\n input_data = np.asarray(text_to_sequence(text, self._cleaner_names), dtype=np.int32)\n linear_target = np.load(os.path.join(self._datadir, meta[0]))\n mel_target = np.load(os.path.join(self._datadir, meta[1]))\n return (input_data, mel_target, linear_target, len(linear_target))\n\n\n def _maybe_get_arpabet(self, word):\n arpabet = self._cmudict.lookup(word)\n return '{%s}' % arpabet[0] if arpabet is not None and random.random() < 0.5 else word\n\n\ndef _prepare_batch(batch, outputs_per_step):\n random.shuffle(batch)\n inputs = _prepare_inputs([x[0] for x in batch])\n input_lengths = np.asarray([len(x[0]) for x in batch], dtype=np.int32)\n mel_targets = _prepare_targets([x[1] for x in batch], outputs_per_step)\n linear_targets = _prepare_targets([x[2] for x in batch], outputs_per_step)\n return (inputs, input_lengths, mel_targets, linear_targets)\n\n\ndef _prepare_inputs(inputs):\n max_len = max((len(x) for x in inputs))\n return np.stack([_pad_input(x, max_len) for x in inputs])\n\n\ndef _prepare_targets(targets, alignment):\n max_len = max((len(t) for t in targets)) + 1\n return np.stack([_pad_target(t, _round_up(max_len, alignment)) for t in targets])\n\n\ndef _pad_input(x, length):\n return np.pad(x, (0, length - x.shape[0]), mode='constant', constant_values=_pad)\n\n\ndef _pad_target(t, length):\n return np.pad(t, [(0, length - t.shape[0]), (0,0)], mode='constant', constant_values=_pad)\n\n\ndef _round_up(x, multiple):\n remainder = x % multiple\n return x if remainder == 0 else x + multiple - remainder\n" ]
[ [ "numpy.pad", "tensorflow.FIFOQueue", "tensorflow.placeholder" ] ]
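A small worked example of the target-padding arithmetic in _prepare_targets/_round_up above: targets are padded to the smallest multiple of outputs_per_step that covers the longest sequence plus one frame, so the decoder always emits whole reduction groups.

import numpy as np


def round_up(x, multiple):
    remainder = x % multiple
    return x if remainder == 0 else x + multiple - remainder


targets = [np.ones((7, 3)), np.ones((10, 3))]
max_len = max(len(t) for t in targets) + 1  # 11, as in _prepare_targets
padded_len = round_up(max_len, 5)           # outputs_per_step = 5 -> 15
batch = np.stack([np.pad(t, [(0, padded_len - len(t)), (0, 0)], mode='constant')
                  for t in targets])
print(batch.shape)  # (2, 15, 3)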
pratikk-bulani/Attribute-and-Simile-Classifiers-for-Face-Verification
[ "653d9ab257482bc2bb3ea7514134a2072496d283" ]
[ "src/neural network (BONUS)/attr_net.py" ]
[ "import torchvision.transforms as transforms\nfrom torch.utils.data.sampler import SubsetRandomSampler\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.utils.data import TensorDataset, DataLoader, Dataset\nimport torchvision\nfrom torchvision import models\nimport torch.optim as optim\nimport pandas as pd\nimport numpy as np\nimport cv2\nimport os\nfrom sklearn import preprocessing\nimport matplotlib.pyplot as plt\nimport re\nimport logging\nimport argparse\n\ndef get_args():\n parser = argparse.ArgumentParser(\n description=\"Train a CelebA facial-attribute classifier\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter,\n )\n # parser.add_argument(\n # \"--model\",\n # \"-m\",\n # default=\"MODEL.pth\",\n # metavar=\"FILE\",\n # help=\"Specify the file in which the model is stored\",\n # )\n # parser.add_argument(\n # \"--input\",\n # \"-i\",\n # metavar=\"INPUT\",\n # nargs=\"+\",\n # help=\"filenames of input images\",\n # required=True,\n # )\n # parser.add_argument(\n # \"--output\", \"-o\", metavar=\"INPUT\", nargs=\"+\", help=\"Filenames of output images\"\n # )\n # parser.add_argument(\n # \"--viz\",\n # \"-v\",\n # action=\"store_true\",\n # help=\"Visualize the images as they are processed\",\n # default=False,\n # )\n # parser.add_argument(\n # \"--no-save\",\n # \"-n\",\n # action=\"store_true\",\n # help=\"Do not save the output masks\",\n # default=False,\n # )\n\n parser.add_argument(\n \"--inputimg\",\n \"-i\",\n type=int,\n help=\"Number of CelebA images to load\",\n default=30000,\n )\n parser.add_argument(\n \"--attributes\",\n \"-a\",\n type=int,\n help=\"Number of attributes to train on\",\n default=40, # CelebA provides 40 binary attributes\n )\n parser.add_argument(\n \"--epochs\",\n \"-e\",\n type=int,\n help=\"Number of training epochs\",\n default=5,\n )\n\n return parser.parse_args()\n\ndef initialize_celeba(CELEBA_IMAGES = 30000):\n # global output_file_path, output_low_level_path, df_attributes\n attributes_path = \"./content/drive/MyDrive/CelebA/metadata/list_attr_celeba.csv\"\n df_attributes = pd.read_csv(attributes_path) \n removeImages = df_attributes.shape[0] - CELEBA_IMAGES\n drop_indices = np.random.choice(df_attributes.index[1:], removeImages, replace=False)\n df_attributes = df_attributes.drop(drop_indices) \n print(df_attributes.shape)\n return df_attributes\n\nclass celebAData(Dataset):\n def __init__(self,df_attributes,transform=None,train=True):\n super().__init__()\n self.df_attributes = df_attributes\n self.transform = transform\n self.train = train\n self.data_list = df_attributes['image_id'].tolist()\n # celeba_id = df['image_id'].tolist()\n\n def __len__(self):\n return len(self.data_list)\n \n def __getitem__(self,item):\n # global output_file_path, output_low_level_path, df_attributes\n # print(item)\n img_idx = item\n imgname = self.data_list[item]\n # foldername = imgname[:-9]\n # imgpath = os.path.join('/content/drive/MyDrive/LFW/zipped/lfw',foldername,imgname)\n imgpath = os.path.join(\"./content/drive/MyDrive/CelebA/zipped/img_align_celeba/img_align_celeba\", imgname)\n # print(imgpath)\n img = cv2.imread(imgpath)\n img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)\n imgr = cv2.resize(img,(224,224))\n label = self.df_attributes.iloc[img_idx][1:]\n label = np.array(label)\n label = np.where(label < 0,0,1)\n\n if self.transform is not None:\n imgr = self.transform(imgr)\n if self.train:\n return {\n 'img' : imgr,\n 'label' : torch.tensor(label)\n }\n else:\n return {\n 'img':imgr\n }\n\nclass ResnetModel(nn.Module):\n 
def __init__(self,n_classes):\n super().__init__()\n resnet = models.resnext50_32x4d(pretrained=True)\n resnet.fc = nn.Sequential(\n nn.Dropout(p=0.2),\n nn.Linear(in_features=resnet.fc.in_features, out_features=n_classes)\n )\n self.base_model = resnet\n self.sigm = nn.Sigmoid()\n\n def forward(self, x):\n return self.sigm(self.base_model(x))\n\ndef pred_acc(original, predicted):\n return torch.round(predicted).eq(original).sum().numpy()/len(original)\n\n#from tqdm.notebook import tqdm\ndef train_test(modeltype,df_attributes,erro='bce',optimizertype='Adam',batch_size=32, epochs=5, transforms=None, modelPath = \"./attr_net_small.pth\"):\n # full_dataset = CustomData(transforms,train_path,trainimgs)\n print(\"=========================================================================================\")\n print(\"=========================================================================================\")\n full_dataset = celebAData(df_attributes, transforms)\n train_size = int(0.8 * len(full_dataset)) \n test_size = len(full_dataset) - train_size\n\n # batch = 32\n train_dataset, test_dataset = torch.utils.data.random_split(full_dataset, [train_size, test_size])\n train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)\n test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False)\n attributesConsidered = df_attributes.shape[1]-1\n print(\"attributes considered:\", attributesConsidered)\n logging.info(\"attributes considered:{}\".format(attributesConsidered))\n\n if modeltype == 'alexnet':\n model = AlexNet(attributesConsidered).to(device)\n elif modeltype == 'resnet':\n model = ResnetModel(attributesConsidered)\n model = model.to(device)\n elif modeltype == 'mobilenet':\n model = MultiOutputModel().to(device)\n else:\n raise Exception(\"Enter a valid model type!\")\n \n if optimizertype == 'Adam':\n optimizer = optim.Adam(model.parameters())\n elif optimizertype == 'Adagrad':\n optimizer = torch.optim.Adagrad(model.parameters(), lr=0.01, lr_decay=0,\n weight_decay=0, initial_accumulator_value=0, eps=1e-10)\n elif optimizertype == 'SGD':\n optimizer = optim.SGD(model.parameters(), lr = 0.01, momentum=0.9)\n else:\n raise Exception(\"Enter a valid optimizer type!\")\n\n if erro == 'ce':\n error = nn.CrossEntropyLoss().to(device)\n elif erro == 'ms':\n error = nn.MSELoss().to(device)\n elif erro == 'bce':\n error = nn.BCELoss().to(device)\n\n valid_loss_min = np.inf\n\n losses = {'train' : [] } \n accuracies = {'train' : []} \n dataloaders = {\n 'train':train_loader,\n 'test':test_loader\n }\n\n print(\"dataloaders loaded\")\n logging.info(\"dataloaders loaded\")\n for epoch in range(epochs):\n print('='*10)\n print(\"Epoch: {}\".format(epoch))\n\n logging.info('='*10)\n logging.info(\"Epoch: {}\".format(epoch))\n\n model.train()\n # for phase in ['train']:\n\n curr_loss = 0.0\n curr_acc = 0\n\n for dinputs in dataloaders[\"train\"]:\n inputs = dinputs[\"img\"].to(device)\n labels = dinputs[\"label\"].to(device)\n\n outputs = model(inputs)\n\n loss = error(outputs, labels.type(torch.float))\n # print(loss)\n curr_loss += loss.item() * inputs.size(0)\n optimizer.zero_grad() # clear gradients accumulated from the previous batch\n loss.backward()\n optimizer.step()\n\n # _, preds = torch.max(outputs, 1)\n preds = outputs\n\n # curr_acc = 0.0\n for i,o in enumerate(outputs):\n acc = pred_acc(torch.Tensor.cpu(labels[i]), torch.Tensor.cpu(o))\n curr_acc += acc\n\n curr_loss = curr_loss / len(dataloaders[\"train\"].sampler)\n curr_acc = np.asarray(curr_acc,dtype=np.float32) / len(dataloaders[\"train\"].sampler)\n \n 
losses[\"train\"].append(curr_loss)\n accuracies[\"train\"].append(curr_acc)\n # if epoch == epochs - 1:\n print(\"train\" + \":\")\n print('loss = {:.4f} accuracy = {:.4f}'.format(curr_loss,curr_acc))\n\n logging.info(\"train: \")\n logging.info('loss = {:.4f} accuracy = {:.4f}'.format(curr_loss,curr_acc))\n # print()\n # train_losses.append(train_loss)\n # valid_losses.append(valid_loss)\n\n '''\n test code here\n '''\n print(\"testing\" + \":\")\n logging.info(\"testing: \")\n model.eval()\n test_loss = 0.0\n test_acc = 0\n for dinputs in dataloaders[\"test\"]:\n inputs = dinputs[\"img\"].to(device)\n labels = dinputs[\"label\"].to(device)\n\n outputs = model(inputs)\n\n loss = error(outputs, labels.type(torch.float))\n test_loss += loss.item() * inputs.size(0)\n\n # _, preds = torch.max(outputs, 1)\n preds = outputs\n\n for i,o in enumerate(outputs):\n acc = pred_acc(torch.Tensor.cpu(labels[i]), torch.Tensor.cpu(o))\n test_acc += acc\n\n test_loss = test_loss / len(dataloaders[\"test\"].sampler)\n test_acc = np.asarray(test_acc,dtype=np.float32) / len(dataloaders[\"test\"].sampler)\n\n print(\"loss: {:.4f} accuracy: {:.4f}\".format(test_loss, test_acc))\n logging.info(\"loss: {:.4f} accuracy: {:.4f}\".format(test_loss, test_acc))\n\n torch.save(model.state_dict(), os.path.join(modelPath))\n print(\"saved model at:\",modelPath)\n logging.info(\"saved model at: {}\".format(modelPath))\n \n return losses,accuracies \n\ndef main(modelPath = \"./attr_net_small.pth\"):\n args = get_args()\n noImages = args.inputimg\n epochs = args.epochs\n df_attributes = initialize_celeba(noImages)\n logging.info(\"celebA initialized\")\n attributesConsidered = args.attributes\n df_attributes_short = df_attributes.iloc[:, : attributesConsidered + 1]\n print(\"attributes:\", attributesConsidered)\n print(\"noImages:\", noImages)\n print(\"epochs:\",epochs)\n print(\"device:\", device)\n logging.info(\"device:{}\".format(device))\n\n transforms_train = transforms.Compose([\n transforms.ToPILImage(),\n transforms.ToTensor(),\n transforms.Normalize((0.3166, 0.3947, 0.4725), (0.1755, 0.1720, 0.1657))\n ])\n loss,acc = train_test(\"resnet\",df_attributes_short,erro='bce',transforms=transforms_train, epochs =epochs, modelPath = modelPath )\n\ndevice = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n # Assuming that we are on a CUDA machine, this should print a CUDA device:\nprint(device)\nlogging.basicConfig(filename='log_attr_net.log', level = logging.INFO)\nmain(modelPath = \"./attr_net_75k_40.pth\")\n" ]
[ [ "torch.nn.Linear", "torch.round", "numpy.array", "torch.nn.Dropout", "numpy.random.choice", "numpy.asarray", "torch.nn.MSELoss", "torch.nn.Sigmoid", "torch.utils.data.random_split", "torch.Tensor.cpu", "numpy.where", "torch.cuda.is_available", "torch.tensor", "torch.utils.data.DataLoader", "torch.nn.BCELoss", "pandas.read_csv", "torch.nn.CrossEntropyLoss" ] ]
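A hedged usage sketch for the pred_acc metric defined in the script above: per-sample multi-label accuracy is the fraction of attributes whose rounded sigmoid output matches the 0/1 label. The toy tensors below are made up for illustration.

import torch


def pred_acc(original, predicted):
    # same definition as in the script: rounded predictions vs. binary labels
    return torch.round(predicted).eq(original).sum().numpy() / len(original)


labels = torch.tensor([1., 0., 1., 1.])
probs = torch.tensor([0.9, 0.2, 0.4, 0.8])
print(pred_acc(labels, probs))  # 3 of 4 attributes match -> 0.75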
LasalJayawardena/Neural-Network-Projects
[ "de10a53c0cc385eca770dfb19fef6c01838549bc" ]
[ "Neural_Networks_Pytorch/utils.py" ]
[ "import matplotlib.pyplot as plt \nimport numpy as np \nimport torch \n\n\ndef multiclass_accuracy(y_pred,y_true):\n top_p,top_class = y_pred.topk(1,dim = 1)\n equals = top_class == y_true.view(*top_class.shape)\n return torch.mean(equals.type(torch.FloatTensor))\n\ndef view_classify(img, ps):\n\n ps = ps.data.numpy().squeeze()\n\n fig, (ax1, ax2) = plt.subplots(figsize=(6,9), ncols=2)\n ax1.imshow(img.resize_(1, 28, 28).numpy().squeeze(),cmap = 'gray')\n ax1.axis('off')\n ax2.barh(np.arange(10), ps)\n ax2.set_aspect(0.1)\n ax2.set_yticks(np.arange(10))\n ax2.set_yticklabels(np.arange(10))\n ax2.set_title('Class Probability')\n ax2.set_xlim(0, 1.1)\n\n plt.tight_layout()\n\n return None\n" ]
[ [ "numpy.arange", "matplotlib.pyplot.tight_layout", "matplotlib.pyplot.subplots" ] ]
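A hedged usage sketch for multiclass_accuracy above, on made-up logits: topk(1, dim=1) picks the argmax class per row, and the mean of the element-wise comparison against the integer labels is the batch accuracy.

import torch

logits = torch.tensor([[2.0, 0.1], [0.2, 1.5], [3.0, 0.0]])
y_true = torch.tensor([0, 1, 1])
top_p, top_class = logits.topk(1, dim=1)        # argmax class per sample
equals = top_class == y_true.view(*top_class.shape)
print(torch.mean(equals.type(torch.FloatTensor)))  # tensor(0.6667)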
computationalmodelling/fidimag
[ "07a275c897a44ad1e0d7e8ef563f10345fdc2a6e" ]
[ "tests/test_two_particles_neb_method.py" ]
[ "from __future__ import print_function\nimport pytest\n\n# FIDIMAG:\nfrom fidimag.micro import Sim\nfrom fidimag.common import CuboidMesh\nfrom fidimag.micro import UniformExchange, UniaxialAnisotropy\nfrom fidimag.common.nebm_spherical import NEBM_Spherical\nfrom fidimag.common.nebm_geodesic import NEBM_Geodesic\nimport numpy as np\n\n# Material Parameters\n# Parameters\nA = 1e-12\nKx = 1e5\n# Strong anisotropy\nMs = 3.8e5\n\n\n\"\"\"\nWe define two particles using a 3-site mesh, setting the\nsite in the middle to Ms = 0\n\n\"\"\"\n\n\ndef two_part(pos):\n\n x = pos[0]\n\n if x > 6 or x < 3:\n return Ms\n else:\n return 0\n\n# Finite differences mesh\nmesh = CuboidMesh(nx=3,\n ny=1,\n nz=1,\n dx=3, dy=3, dz=3,\n unit_length=1e-9\n )\n\n\n# Simulation Function\ndef relax_neb(k, maxst, simname, init_im, interp, save_every=10000,\n coordinates='Cartesian'):\n \"\"\"\n Execute a simulation with the NEB function of the FIDIMAG code, for an\n elongated particle (long cylinder)\n\n The simulations are made for a specific spring constant 'k' (a float),\n a list of initial images 'init_im', interpolations between images 'interp'\n (an array) and a maximum of 'maxst' steps.\n 'simname' is the name of the simulation, to distinguish the\n output files.\n\n --> vtks and npys are saved in files starting with the 'simname' string\n\n \"\"\"\n\n # Prepare simulation\n # We define the cylinder with the Magnetisation function\n sim = Sim(mesh)\n sim.Ms = two_part\n\n # sim.add(UniformExchange(A=A))\n\n # Uniaxial anisotropy along x-axis\n sim.add(UniaxialAnisotropy(Kx, axis=(1, 0, 0)))\n\n # Define many initial states close to one extreme. We want to check\n # if the images in the last step are placed at roughly equally spaced positions\n init_images = init_im\n\n # Number of images between each state specified before (here we need only\n # two, one for the states between the initial and intermediate state\n # and another one for the images between the intermediate and final\n # states).
Thus, the number of interpolations must always be\n # equal to 'the number of initial states specified', minus one.\n interpolations = interp\n\n if coordinates == 'Spherical':\n neb = NEBM_Spherical(sim,\n init_images,\n interpolations=interpolations,\n spring_constant=k,\n name=simname\n )\n if coordinates == 'Geodesic':\n neb = NEBM_Geodesic(sim,\n init_images,\n interpolations=interpolations,\n spring_constant=k,\n name=simname,\n integrator='sundials'\n )\n\n neb.relax(max_iterations=2000,\n save_vtks_every=save_every,\n save_npys_every=save_every,\n stopping_dYdt=1e-4,\n dt=1e-6\n )\n\n # print(neb.G)\n # print(neb.tangents)\n # print(neb.spring_force)\n\n\ndef mid_m(pos):\n if pos[0] > 4:\n return (0.5, 0, 0.2)\n else:\n return (-0.5, 0, 0.2)\n\n\ndef test_energy_barrier_2particles():\n # Initial images: we set here a rotation interpolating\n init_im = [(-1, 0, 0), mid_m, (1, 0, 0)]\n interp = [6, 6]\n\n coord_list = ['Geodesic']\n barriers = []\n\n # Define different ks for multiple simulations\n # krange = ['1e8']\n\n for coordinates in coord_list:\n relax_neb(1e4, 2000,\n 'neb_2particles_k1e8_10-10int_{}'.format(coordinates),\n init_im,\n interp,\n save_every=5000,\n coordinates=coordinates\n )\n\n _file = np.loadtxt('neb_2particles_k1e8_10-10int_{}_energy.ndt'.format(coordinates))\n barriers.append((np.max(_file[-1][1:]) - _file[-1][1]) / 1.602e-19)\n\n print('Energy barrier for {} is:'.format(coordinates), barriers[-1])\n assert np.abs(barriers[-1] - 0.016019) < 1e-5\n\n print(barriers)\n\n\nif __name__ == '__main__':\n test_energy_barrier_2particles()\n" ]
[ [ "numpy.max", "numpy.abs" ] ]
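A hedged sketch of the barrier extraction at the end of the test above: the energy barrier is the maximum band energy minus the first image's energy in the last relaxation step, converted from Joules to eV. The energies below are made-up numbers, not output of the simulation.

import numpy as np

last_step_energies = np.array([0.0, 1.2e-21, 2.566e-21, 1.2e-21, 0.0])  # assumed band energies in J
barrier_eV = (np.max(last_step_energies) - last_step_energies[0]) / 1.602e-19
print(barrier_eV)  # ~0.016 eV, close to the value asserted in the test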
akolobov/robomimic
[ "104f00e84f915b3b618bc0fb9b849cfeb61e8e8e" ]
[ "robomimic/utils/dataset.py" ]
[ "\"\"\"\nThis file contains Dataset classes that are used by torch dataloaders\nto fetch batches from hdf5 files.\n\"\"\"\nimport os\nimport h5py\nimport numpy as np\nfrom copy import deepcopy\nfrom contextlib import contextmanager\n\nimport torch.utils.data\n\nimport robomimic.utils.tensor_utils as TensorUtils\nimport robomimic.utils.obs_utils as ObsUtils\nimport robomimic.utils.log_utils as LogUtils\n\n\nclass SequenceDataset(torch.utils.data.Dataset):\n def __init__(\n self,\n hdf5_path,\n obs_keys,\n dataset_keys,\n frame_stack=1,\n seq_length=1,\n pad_frame_stack=True,\n pad_seq_length=True,\n get_pad_mask=False,\n goal_mode=None,\n hdf5_cache_mode=None,\n hdf5_use_swmr=True,\n hdf5_normalize_obs=False,\n filter_by_attribute=None,\n load_next_obs=True,\n ):\n \"\"\"\n Dataset class for fetching sequences of experience.\n Length of the fetched sequence is equal to (@frame_stack - 1 + @seq_length)\n\n Args:\n hdf5_path (str): path to hdf5\n\n obs_keys (tuple, list): keys to observation items (image, object, etc) to be fetched from the dataset\n\n dataset_keys (tuple, list): keys to dataset items (actions, rewards, etc) to be fetched from the dataset\n\n frame_stack (int): number of stacked frames to fetch. Defaults to 1 (single frame).\n\n seq_length (int): length of sequences to sample. Defaults to 1 (single frame).\n\n pad_frame_stack (bool): whether to pad sequence for frame stacking at the beginning of a demo. This\n ensures that partial frame stacks are observed, such as (s_0, s_0, s_0, s_1). Otherwise, the\n first frame stacked observation would be (s_0, s_1, s_2, s_3).\n\n pad_seq_length (bool): whether to pad sequence for sequence fetching at the end of a demo. This\n ensures that partial sequences at the end of a demonstration are observed, such as\n (s_{T-1}, s_{T}, s_{T}, s_{T}). Otherwise, the last sequence provided would be\n (s_{T-3}, s_{T-2}, s_{T-1}, s_{T}).\n\n get_pad_mask (bool): if True, also provide padding masks as part of the batch. This can be\n useful for masking loss functions on padded parts of the data.\n\n goal_mode (str): either \"last\" or None. Defaults to None, which is to not fetch goals\n\n hdf5_cache_mode (str): one of [\"all\", \"low_dim\", or None]. Set to \"all\" to cache entire hdf5 \n in memory - this is by far the fastest for data loading. Set to \"low_dim\" to cache all \n non-image data. Set to None to use no caching - in this case, every batch sample is \n retrieved via file i/o. You should almost never set this to None, even for large \n image datasets.\n\n hdf5_use_swmr (bool): whether to use swmr feature when opening the hdf5 file.
This ensures\n that multiple Dataset instances can all access the same hdf5 file without problems.\n\n hdf5_normalize_obs (bool): if True, normalize observations by computing the mean observation\n and std of each observation (in each dimension and modality), and normalizing to unit\n mean and variance in each dimension.\n\n filter_by_attribute (str): if provided, use the provided filter key to look up a subset of\n demonstrations to load\n\n load_next_obs (bool): whether to load next_obs from the dataset\n \"\"\"\n super(SequenceDataset, self).__init__()\n\n self.hdf5_path = os.path.expanduser(hdf5_path)\n self.hdf5_use_swmr = hdf5_use_swmr\n self.hdf5_normalize_obs = hdf5_normalize_obs\n self._hdf5_file = None\n\n assert hdf5_cache_mode in [\"all\", \"low_dim\", None]\n self.hdf5_cache_mode = hdf5_cache_mode\n\n self.load_next_obs = load_next_obs\n self.filter_by_attribute = filter_by_attribute\n\n # get all keys that needs to be fetched\n self.obs_keys = tuple(obs_keys)\n self.dataset_keys = tuple(dataset_keys)\n\n self.n_frame_stack = frame_stack\n assert self.n_frame_stack >= 1\n\n self.seq_length = seq_length\n assert self.seq_length >= 1\n\n self.goal_mode = goal_mode\n if self.goal_mode is not None:\n assert self.goal_mode in [\"last\"]\n if not self.load_next_obs:\n assert self.goal_mode != \"last\" # we use last next_obs as goal\n\n self.pad_seq_length = pad_seq_length\n self.pad_frame_stack = pad_frame_stack\n self.get_pad_mask = get_pad_mask\n\n self.load_demo_info(filter_by_attribute=self.filter_by_attribute)\n\n # maybe prepare for observation normalization\n self.obs_normalization_stats = None\n if self.hdf5_normalize_obs:\n self.obs_normalization_stats = self.normalize_obs()\n\n # maybe store dataset in memory for fast access\n if self.hdf5_cache_mode in [\"all\", \"low_dim\"]:\n obs_keys_in_memory = self.obs_keys\n if self.hdf5_cache_mode == \"low_dim\":\n # only store low-dim observations\n obs_keys_in_memory = []\n for k in self.obs_keys:\n if ObsUtils.key_is_obs_modality(k, \"low_dim\"):\n obs_keys_in_memory.append(k)\n self.obs_keys_in_memory = obs_keys_in_memory\n\n self.hdf5_cache = self.load_dataset_in_memory(\n demo_list=self.demos,\n hdf5_file=self.hdf5_file,\n obs_keys=self.obs_keys_in_memory,\n dataset_keys=self.dataset_keys,\n load_next_obs=self.load_next_obs\n )\n\n if self.hdf5_cache_mode == \"all\":\n # cache getitem calls for even more speedup. We don't do this for\n # \"low-dim\" since image observations require calls to getitem anyways.\n print(\"SequenceDataset: caching get_item calls...\")\n self.getitem_cache = [self.get_item(i) for i in LogUtils.custom_tqdm(range(len(self)))]\n\n # don't need the previous cache anymore\n del self.hdf5_cache\n self.hdf5_cache = None\n else:\n self.hdf5_cache = None\n\n self.close_and_delete_hdf5_handle()\n\n def load_demo_info(self, filter_by_attribute=None, demos=None):\n \"\"\"\n Args:\n filter_by_attribute (str): if provided, use the provided filter key\n to select a subset of demonstration trajectories to load\n\n demos (list): list of demonstration keys to load from the hdf5 file. 
If \n omitted, all demos in the file (or under the @filter_by_attribute \n filter key) are used.\n \"\"\"\n # filter demo trajectory by mask\n if demos is not None:\n self.demos = demos\n elif filter_by_attribute is not None:\n self.demos = [elem.decode(\"utf-8\") for elem in np.array(self.hdf5_file[\"mask/{}\".format(filter_by_attribute)][:])]\n else:\n self.demos = list(self.hdf5_file[\"data\"].keys())\n\n # sort demo keys\n inds = np.argsort([int(elem[5:]) for elem in self.demos])\n self.demos = [self.demos[i] for i in inds]\n\n self.n_demos = len(self.demos)\n\n # keep internal index maps to know which transitions belong to which demos\n self._index_to_demo_id = dict() # maps every index to a demo id\n self._demo_id_to_start_indices = dict() # gives start index per demo id\n self._demo_id_to_demo_length = dict()\n\n # determine index mapping\n self.total_num_sequences = 0\n for ep in self.demos:\n demo_length = self.hdf5_file[\"data/{}\".format(ep)].attrs[\"num_samples\"]\n self._demo_id_to_start_indices[ep] = self.total_num_sequences\n self._demo_id_to_demo_length[ep] = demo_length\n\n num_sequences = demo_length\n # determine actual number of sequences taking into account whether to pad for frame_stack and seq_length\n if not self.pad_frame_stack:\n num_sequences -= (self.n_frame_stack - 1)\n if not self.pad_seq_length:\n num_sequences -= (self.seq_length - 1)\n\n if self.pad_seq_length:\n assert demo_length >= 1 # sequence needs to have at least one sample\n num_sequences = max(num_sequences, 1)\n else:\n assert num_sequences >= 1 # assume demo_length >= (self.n_frame_stack - 1 + self.seq_length)\n\n for _ in range(num_sequences):\n self._index_to_demo_id[self.total_num_sequences] = ep\n self.total_num_sequences += 1\n\n @property\n def hdf5_file(self):\n \"\"\"\n This property allows for a lazy hdf5 file open.\n \"\"\"\n if self._hdf5_file is None:\n self._hdf5_file = h5py.File(self.hdf5_path, 'r', swmr=self.hdf5_use_swmr, libver='latest')\n return self._hdf5_file\n\n def close_and_delete_hdf5_handle(self):\n \"\"\"\n Maybe close the file handle.\n \"\"\"\n if self._hdf5_file is not None:\n self._hdf5_file.close()\n self._hdf5_file = None\n\n @contextmanager\n def hdf5_file_opened(self):\n \"\"\"\n Convenient context manager to open the file on entering the scope\n and then close it on leaving.\n \"\"\"\n should_close = self._hdf5_file is None\n yield self.hdf5_file\n if should_close:\n self.close_and_delete_hdf5_handle()\n\n def __del__(self):\n self.close_and_delete_hdf5_handle()\n\n def __repr__(self):\n \"\"\"\n Pretty print the class and important attributes on a call to `print`.\n \"\"\"\n msg = str(self.__class__.__name__)\n msg += \" (\\n\\tpath={}\\n\\tobs_keys={}\\n\\tseq_length={}\\n\\tfilter_key={}\\n\\tframe_stack={}\\n\"\n msg += \"\\tpad_seq_length={}\\n\\tpad_frame_stack={}\\n\\tgoal_mode={}\\n\"\n msg += \"\\tcache_mode={}\\n\"\n msg += \"\\tnum_demos={}\\n\\tnum_sequences={}\\n)\"\n filter_key_str = self.filter_by_attribute if self.filter_by_attribute is not None else \"none\"\n goal_mode_str = self.goal_mode if self.goal_mode is not None else \"none\"\n cache_mode_str = self.hdf5_cache_mode if self.hdf5_cache_mode is not None else \"none\"\n msg = msg.format(self.hdf5_path, self.obs_keys, self.seq_length, filter_key_str, self.n_frame_stack,\n self.pad_seq_length, self.pad_frame_stack, goal_mode_str, cache_mode_str,\n self.n_demos, self.total_num_sequences)\n return msg\n\n def __len__(self):\n \"\"\"\n Ensure that the torch dataloader will do a complete pass 
through all sequences in \n the dataset before starting a new iteration.\n \"\"\"\n return self.total_num_sequences\n\n def load_dataset_in_memory(self, demo_list, hdf5_file, obs_keys, dataset_keys, load_next_obs):\n \"\"\"\n Loads the hdf5 dataset into memory, preserving the structure of the file. Note that this\n differs from `self.getitem_cache`, which, if active, actually caches the outputs of the\n `getitem` operation.\n\n Args:\n demo_list (list): list of demo keys, e.g., 'demo_0'\n hdf5_file (h5py.File): file handle to the hdf5 dataset.\n obs_keys (list, tuple): observation keys to fetch, e.g., 'images'\n dataset_keys (list, tuple): dataset keys to fetch, e.g., 'actions'\n load_next_obs (bool): whether to load next_obs from the dataset\n\n Returns:\n all_data (dict): dictionary of loaded data.\n \"\"\"\n all_data = dict()\n print(\"SequenceDataset: loading dataset into memory...\")\n for ep in LogUtils.custom_tqdm(demo_list):\n all_data[ep] = {}\n all_data[ep][\"attrs\"] = {}\n all_data[ep][\"attrs\"][\"num_samples\"] = hdf5_file[\"data/{}\".format(ep)].attrs[\"num_samples\"]\n # get obs\n all_data[ep][\"obs\"] = {k: hdf5_file[\"data/{}/obs/{}\".format(ep, k)][()].astype('float32') for k in obs_keys}\n if load_next_obs:\n all_data[ep][\"next_obs\"] = {k: hdf5_file[\"data/{}/next_obs/{}\".format(ep, k)][()].astype('float32') for k in obs_keys}\n # get other dataset keys\n for k in dataset_keys:\n if k in hdf5_file[\"data/{}\".format(ep)]:\n all_data[ep][k] = hdf5_file[\"data/{}/{}\".format(ep, k)][()].astype('float32')\n else:\n all_data[ep][k] = np.zeros((all_data[ep][\"attrs\"][\"num_samples\"], 1), dtype=np.float32)\n\n if \"model_file\" in hdf5_file[\"data/{}\".format(ep)].attrs:\n all_data[ep][\"attrs\"][\"model_file\"] = hdf5_file[\"data/{}\".format(ep)].attrs[\"model_file\"]\n\n return all_data\n\n def normalize_obs(self):\n \"\"\"\n Computes a dataset-wide mean and standard deviation for the observations \n (per dimension and per obs key) and returns it.\n \"\"\"\n def _compute_traj_stats(traj_obs_dict):\n \"\"\"\n Helper function to compute statistics over a single trajectory of observations.\n \"\"\"\n traj_stats = { k : {} for k in traj_obs_dict }\n for k in traj_obs_dict:\n traj_stats[k][\"n\"] = traj_obs_dict[k].shape[0]\n traj_stats[k][\"mean\"] = traj_obs_dict[k].mean(axis=0, keepdims=True) # [1, ...]\n traj_stats[k][\"sqdiff\"] = ((traj_obs_dict[k] - traj_stats[k][\"mean\"]) ** 2).sum(axis=0, keepdims=True) # [1, ...]\n return traj_stats\n\n def _aggregate_traj_stats(traj_stats_a, traj_stats_b):\n \"\"\"\n Helper function to aggregate trajectory statistics.\n See https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm\n for more information.\n \"\"\"\n merged_stats = {}\n for k in traj_stats_a:\n n_a, avg_a, M2_a = traj_stats_a[k][\"n\"], traj_stats_a[k][\"mean\"], traj_stats_a[k][\"sqdiff\"]\n n_b, avg_b, M2_b = traj_stats_b[k][\"n\"], traj_stats_b[k][\"mean\"], traj_stats_b[k][\"sqdiff\"]\n n = n_a + n_b\n mean = (n_a * avg_a + n_b * avg_b) / n\n delta = (avg_b - avg_a)\n M2 = M2_a + M2_b + (delta ** 2) * (n_a * n_b) / n\n merged_stats[k] = dict(n=n, mean=mean, sqdiff=M2)\n return merged_stats\n\n # Run through all trajectories. 
For each one, compute minimal observation statistics, and then aggregate\n # with the previous statistics.\n ep = self.demos[0]\n obs_traj = {k: self.hdf5_file[\"data/{}/obs/{}\".format(ep, k)][()].astype('float32') for k in self.obs_keys}\n obs_traj = ObsUtils.process_obs_dict(obs_traj)\n merged_stats = _compute_traj_stats(obs_traj)\n print(\"SequenceDataset: normalizing observations...\")\n for ep in LogUtils.custom_tqdm(self.demos[1:]):\n obs_traj = {k: self.hdf5_file[\"data/{}/obs/{}\".format(ep, k)][()].astype('float32') for k in self.obs_keys}\n obs_traj = ObsUtils.process_obs_dict(obs_traj)\n traj_stats = _compute_traj_stats(obs_traj)\n merged_stats = _aggregate_traj_stats(merged_stats, traj_stats)\n\n obs_normalization_stats = { k : {} for k in merged_stats }\n for k in merged_stats:\n # note we add a small tolerance of 1e-3 for std\n obs_normalization_stats[k][\"mean\"] = merged_stats[k][\"mean\"]\n obs_normalization_stats[k][\"std\"] = np.sqrt(merged_stats[k][\"sqdiff\"] / merged_stats[k][\"n\"]) + 1e-3\n return obs_normalization_stats\n\n def get_obs_normalization_stats(self):\n \"\"\"\n Returns dictionary of mean and std for each observation key if using\n observation normalization, otherwise None.\n\n Returns:\n obs_normalization_stats (dict): a dictionary for observation\n normalization. This maps observation keys to dicts\n with a \"mean\" and \"std\" of shape (1, ...) where ... is the default\n shape for the observation.\n \"\"\"\n assert self.hdf5_normalize_obs, \"not using observation normalization!\"\n return deepcopy(self.obs_normalization_stats)\n\n def get_dataset_for_ep(self, ep, key):\n \"\"\"\n Helper utility to get a dataset for a specific demonstration.\n Takes into account whether the dataset has been loaded into memory.\n \"\"\"\n\n # check if this key should be in memory\n key_should_be_in_memory = (self.hdf5_cache_mode in [\"all\", \"low_dim\"])\n if key_should_be_in_memory:\n # if key is an observation, it may not be in memory\n if '/' in key:\n key1, key2 = key.split('/')\n assert(key1 in ['obs', 'next_obs'])\n if key2 not in self.obs_keys_in_memory:\n key_should_be_in_memory = False\n\n if key_should_be_in_memory:\n # read cache\n if '/' in key:\n key1, key2 = key.split('/')\n assert(key1 in ['obs', 'next_obs'])\n ret = self.hdf5_cache[ep][key1][key2]\n else:\n ret = self.hdf5_cache[ep][key]\n else:\n # read from file\n hd5key = \"data/{}/{}\".format(ep, key)\n ret = self.hdf5_file[hd5key]\n return ret\n\n def __getitem__(self, index):\n \"\"\"\n Fetch dataset sequence @index (inferred through internal index map), using the getitem_cache if available.\n \"\"\"\n if self.hdf5_cache_mode == \"all\":\n return self.getitem_cache[index]\n return self.get_item(index)\n\n def get_item(self, index):\n \"\"\"\n Main implementation of getitem when not using cache.\n \"\"\"\n\n demo_id = self._index_to_demo_id[index]\n demo_start_index = self._demo_id_to_start_indices[demo_id]\n demo_length = self._demo_id_to_demo_length[demo_id]\n\n # start at offset index if not padding for frame stacking\n demo_index_offset = 0 if self.pad_frame_stack else (self.n_frame_stack - 1)\n index_in_demo = index - demo_start_index + demo_index_offset\n\n # end at offset index if not padding for seq length\n demo_length_offset = 0 if self.pad_seq_length else (self.seq_length - 1)\n end_index_in_demo = demo_length - demo_length_offset\n\n meta = self.get_dataset_sequence_from_demo(\n demo_id,\n index_in_demo=index_in_demo,\n keys=self.dataset_keys,\n seq_length=self.seq_length\n )\n\n # 
determine goal index\n goal_index = None\n if self.goal_mode == \"last\":\n goal_index = end_index_in_demo - 1\n\n meta[\"obs\"] = self.get_obs_sequence_from_demo(\n demo_id,\n index_in_demo=index_in_demo,\n keys=self.obs_keys,\n num_frames_to_stack=self.n_frame_stack - 1,\n seq_length=self.seq_length,\n prefix=\"obs\"\n )\n if self.hdf5_normalize_obs:\n meta[\"obs\"] = ObsUtils.normalize_obs(meta[\"obs\"], obs_normalization_stats=self.obs_normalization_stats)\n\n if self.load_next_obs:\n meta[\"next_obs\"] = self.get_obs_sequence_from_demo(\n demo_id,\n index_in_demo=index_in_demo,\n keys=self.obs_keys,\n num_frames_to_stack=self.n_frame_stack - 1,\n seq_length=self.seq_length,\n prefix=\"next_obs\"\n )\n if self.hdf5_normalize_obs:\n meta[\"next_obs\"] = ObsUtils.normalize_obs(meta[\"next_obs\"], obs_normalization_stats=self.obs_normalization_stats)\n\n if goal_index is not None:\n goal = self.get_obs_sequence_from_demo(\n demo_id,\n index_in_demo=goal_index,\n keys=self.obs_keys,\n num_frames_to_stack=0,\n seq_length=1,\n prefix=\"next_obs\",\n )\n if self.hdf5_normalize_obs:\n goal = ObsUtils.normalize_obs(goal, obs_normalization_stats=self.obs_normalization_stats)\n meta[\"goal_obs\"] = {k: goal[k][0] for k in goal} # remove sequence dimension for goal\n\n return meta\n\n def get_sequence_from_demo(self, demo_id, index_in_demo, keys, num_frames_to_stack=0, seq_length=1):\n \"\"\"\n Extract a (sub)sequence of data items from a demo given the @keys of the items.\n\n Args:\n demo_id (str): id of the demo, e.g., demo_0\n index_in_demo (int): beginning index of the sequence wrt the demo\n keys (tuple): list of keys to extract\n num_frames_to_stack (int): numbers of frame to stack. Seq gets prepended with repeated items if out of range\n seq_length (int): sequence length to extract. Seq gets post-pended with repeated items if out of range\n\n Returns:\n a dictionary of extracted items.\n \"\"\"\n assert num_frames_to_stack >= 0\n assert seq_length >= 1\n\n demo_length = self._demo_id_to_demo_length[demo_id]\n assert index_in_demo < demo_length\n\n # determine begin and end of sequence\n seq_begin_index = max(0, index_in_demo - num_frames_to_stack)\n seq_end_index = min(demo_length, index_in_demo + seq_length)\n\n # determine sequence padding\n seq_begin_pad = max(0, num_frames_to_stack - index_in_demo) # pad for frame stacking\n seq_end_pad = max(0, index_in_demo + seq_length - demo_length) # pad for sequence length\n\n # make sure we are not padding if specified.\n if not self.pad_frame_stack:\n assert seq_begin_pad == 0\n if not self.pad_seq_length:\n assert seq_end_pad == 0\n\n # fetch observation from the dataset file\n seq = dict()\n for k in keys:\n data = self.get_dataset_for_ep(demo_id, k)\n seq[k] = data[seq_begin_index: seq_end_index].astype(\"float32\")\n\n seq = TensorUtils.pad_sequence(seq, padding=(seq_begin_pad, seq_end_pad), pad_same=True)\n pad_mask = np.array([0] * seq_begin_pad + [1] * (seq_end_index - seq_begin_index) + [0] * seq_end_pad)\n pad_mask = pad_mask[:, None].astype(np.bool)\n\n return seq, pad_mask\n\n def get_obs_sequence_from_demo(self, demo_id, index_in_demo, keys, num_frames_to_stack=0, seq_length=1, prefix=\"obs\"):\n \"\"\"\n Extract a (sub)sequence of observation items from a demo given the @keys of the items.\n\n Args:\n demo_id (str): id of the demo, e.g., demo_0\n index_in_demo (int): beginning index of the sequence wrt the demo\n keys (tuple): list of keys to extract\n num_frames_to_stack (int): numbers of frame to stack. 
Seq gets prepended with repeated items if out of range\n seq_length (int): sequence length to extract. Seq gets post-pended with repeated items if out of range\n prefix (str): one of \"obs\", \"next_obs\"\n\n Returns:\n a dictionary of extracted items.\n \"\"\"\n obs, pad_mask = self.get_sequence_from_demo(\n demo_id,\n index_in_demo=index_in_demo,\n keys=tuple('{}/{}'.format(prefix, k) for k in keys),\n num_frames_to_stack=num_frames_to_stack,\n seq_length=seq_length,\n )\n obs = {k.split('/')[1]: obs[k] for k in obs} # strip the prefix\n if self.get_pad_mask:\n obs[\"pad_mask\"] = pad_mask\n\n # prepare image observations from dataset\n return ObsUtils.process_obs_dict(obs)\n\n def get_dataset_sequence_from_demo(self, demo_id, index_in_demo, keys, seq_length=1):\n \"\"\"\n Extract a (sub)sequence of dataset items from a demo given the @keys of the items (e.g., states, actions).\n \n Args:\n demo_id (str): id of the demo, e.g., demo_0\n index_in_demo (int): beginning index of the sequence wrt the demo\n keys (tuple): list of keys to extract\n seq_length (int): sequence length to extract. Seq gets post-pended with repeated items if out of range\n\n Returns:\n a dictionary of extracted items.\n \"\"\"\n data, pad_mask = self.get_sequence_from_demo(\n demo_id,\n index_in_demo=index_in_demo,\n keys=keys,\n num_frames_to_stack=0, # don't frame stack for meta keys\n seq_length=seq_length,\n )\n if self.get_pad_mask:\n data[\"pad_mask\"] = pad_mask\n return data\n\n def get_trajectory_at_index(self, index):\n \"\"\"\n Method provided as a utility to get an entire trajectory, given\n the corresponding @index.\n \"\"\"\n demo_id = self.demos[index]\n demo_length = self._demo_id_to_demo_length[demo_id]\n\n meta = self.get_dataset_sequence_from_demo(\n demo_id,\n index_in_demo=0,\n keys=self.dataset_keys,\n seq_length=demo_length\n )\n meta[\"obs\"] = self.get_obs_sequence_from_demo(\n demo_id,\n index_in_demo=0,\n keys=self.obs_keys,\n seq_length=demo_length\n )\n if self.load_next_obs:\n meta[\"next_obs\"] = self.get_obs_sequence_from_demo(\n demo_id,\n index_in_demo=0,\n keys=self.obs_keys,\n seq_length=demo_length,\n prefix=\"next_obs\"\n )\n\n meta[\"ep\"] = demo_id\n return meta\n\n def get_dataset_sampler(self):\n \"\"\"\n Return instance of torch.utils.data.Sampler or None. Allows\n for dataset to define custom sampling logic, such as\n re-weighting the probability of samples being drawn.\n See the `train` function in scripts/train.py, and torch\n `DataLoader` documentation, for more info.\n \"\"\"\n return None\n" ]
[ [ "numpy.array", "numpy.zeros", "numpy.sqrt" ] ]
foobarbecue/meshcut_python3
[ "222bd484efd69246d6c45a2d73ea17fc7410eb6f" ]
[ "meshcut.py" ]
[ "\"\"\"\nFunctions to slice a mesh. For now, computes planar cross-section\n\"\"\"\nimport numpy as np\nimport numpy.linalg as la\nimport collections\n\n# ---- Geometry datastructures\n\n\ndef make_edge(v1, v2):\n \"\"\"\n We store edges as tuple where the vertex indices are sorted (so\n the edge going from v1 to v2 and v2 to v1 is the same)\n \"\"\"\n return tuple(sorted((v1, v2)))\n\n\nclass TriangleMesh(object):\n def __init__(self, verts, tris):\n \"\"\"\n Args:\n verts: The 3D vertex positions\n tris: A list of triplet containing vertex indices for each triangle\n \"\"\"\n self.verts = verts\n # For each edge, contains the list of triangles it belongs to\n # If the mesh is closed, each edge belongs to 2 triangles\n self.edges_to_tris = collections.defaultdict(lambda: [])\n # For each triangle, contains the edges it contains\n self.tris_to_edges = {}\n # For each vertex, the list of triangles it belongs to\n self.verts_to_tris = collections.defaultdict(lambda: [])\n\n self.tris = tris\n\n # Fill data structures\n for tid, f in enumerate(tris):\n tri_edges = []\n for i in range(3):\n v1 = f[i]\n v2 = f[(i + 1) % 3]\n e = make_edge(v1, v2)\n self.edges_to_tris[e].append(tid)\n tri_edges.append(e)\n self.verts_to_tris[f[i]].append(tid)\n self.tris_to_edges[tid] = tri_edges\n\n # Sanity check : max 2 faces per edge\n for e, tris in self.edges_to_tris.items():\n try:\n assert len(tris) <= 2\n except AssertionError:\n print('{} had more than 2 tris'.format(e))\n\n def edges_for_triangle(self, tidx):\n \"\"\"Returns the edges forming triangle with given index\"\"\"\n return self.tris_to_edges[tidx]\n\n def triangles_for_edge(self, edge):\n return self.edges_to_tris[edge]\n\n def triangles_for_vert(self, vidx):\n \"\"\"Returns the triangles `vidx` belongs to\"\"\"\n return self.verts_to_tris[vidx]\n\n\nclass Plane(object):\n def __init__(self, orig, normal):\n self.orig = orig\n self.n = normal / la.norm(normal)\n\n def __str__(self):\n return 'plane(o=%s, n=%s)' % (self.orig, self.n)\n\n\ndef point_to_plane_dist(p, plane):\n return np.dot((p - plane.orig), plane.n)\n\n\ndef triangle_intersects_plane(mesh, tid, plane):\n \"\"\"\n Returns true if the given triangle is cut by the plane. 
This also returns\n    true if a single vertex of the triangle lies exactly on the plane,\n    since the three signed distances then no longer share a sign\n    \"\"\"\n    dists = [point_to_plane_dist(mesh.verts[vid], plane)\n             for vid in mesh.tris[tid]]\n    side = np.sign(dists)\n    return not (side[0] == side[1] == side[2])\n\n\n# ---- Planar cross-section\n\nINTERSECT_EDGE = 0\nINTERSECT_VERTEX = 1\n\n\ndef compute_triangle_plane_intersections(mesh, tid, plane, dist_tol):\n    \"\"\"\n    Compute the intersection between a triangle and a plane\n\n    Returns a list of intersections in the form\n    (INTERSECT_EDGE, <intersection point>, <edge>) for edge intersections\n    (INTERSECT_VERTEX, <intersection point>, <vertex index>) for vertices\n\n    This returns between 0 and 2 intersections:\n    - 0 : the plane does not intersect the triangle\n    - 1 : one of the triangle's vertices lies on the plane (so it just\n          \"touches\" the plane without really intersecting)\n    - 2 : the plane slices the triangle in two parts (either vertex-edge,\n          vertex-vertex or edge-edge)\n    \"\"\"\n    # TODO: Use a distance cache\n    dists = {vid: point_to_plane_dist(mesh.verts[vid], plane)\n             for vid in mesh.tris[tid]}\n    # TODO: Use an edge intersection cache (we currently compute each edge\n    # intersection twice : once for each tri)\n\n    # This is to avoid registering the same vertex intersection twice\n    # from two different edges\n    vert_intersect = {vid: False for vid in dists.keys()}\n\n    # Iterate through the edges, cutting the ones that intersect\n    intersections = []\n    for e in mesh.edges_for_triangle(tid):\n        v1 = mesh.verts[e[0]]\n        d1 = dists[e[0]]\n        v2 = mesh.verts[e[1]]\n        d2 = dists[e[1]]\n\n        if np.fabs(d1) < dist_tol:\n            # Avoid creating the vertex intersection twice\n            if not vert_intersect[e[0]]:\n                # point on plane\n                intersections.append((INTERSECT_VERTEX, v1, e[0]))\n                vert_intersect[e[0]] = True\n        if np.fabs(d2) < dist_tol:\n            if not vert_intersect[e[1]]:\n                # point on plane\n                intersections.append((INTERSECT_VERTEX, v2, e[1]))\n                vert_intersect[e[1]] = True\n        if d1 * d2 < 0:\n            # Due to numerical accuracy, we could have both a vertex intersect\n            # and an edge intersect on the same vertex, which is impossible\n            if not vert_intersect[e[0]] and not vert_intersect[e[1]]:\n                # intersection factor (between 0 and 1)\n                # here is a nice drawing :\n                # https://ravehgonen.files.wordpress.com/2013/02/slide8.png\n                # keep in mind d1, d2 are *signed* distances (=> d1 - d2)\n                s = d1 / (d1 - d2)\n                vdir = v2 - v1\n                ipos = v1 + vdir * s\n                intersections.append((INTERSECT_EDGE, ipos, e))\n\n    return intersections\n\n\ndef get_next_triangle(mesh, from_tid, plane, intersection, dist_tol):\n    \"\"\"\n    Returns the next triangle to visit given the intersection and\n    the triangle we're coming from\n\n    We look for a triangle that is cut by the plane (2 intersections) as\n    opposed to one that only touches the plane (1 vertex intersection)\n    \"\"\"\n    if intersection[0] == INTERSECT_EDGE:\n        tris = mesh.triangles_for_edge(intersection[2])\n    elif intersection[0] == INTERSECT_VERTEX:\n        tris = mesh.triangles_for_vert(intersection[2])\n\n    for tid in tris:\n        if tid != from_tid:\n            intersections = compute_triangle_plane_intersections(\n                mesh, tid, plane, dist_tol)\n            if len(intersections) == 2:\n                return tid, intersections\n    return None, []\n\n\ndef cross_section_mesh(mesh, plane, dist_tol=1e-8):\n    \"\"\"\n    Args:\n        mesh: A geom.TriangleMesh instance\n        plane: The cut plane : geom.Plane instance\n        dist_tol: If two points are closer than dist_tol, they are considered\n        the same\n    \"\"\"\n    # Set of all triangles\n    T = set(range(len(mesh.tris)))\n    # List of all cross-section
polylines\n P = []\n\n while len(T) > 0:\n tid = T.pop()\n intersections = compute_triangle_plane_intersections(\n mesh, tid, plane, dist_tol)\n\n if len(intersections) == 2:\n # We found a starting triangle for a new polyline\n p = []\n\n # We can start in either direction (intersections[0] or [1]), this\n # is arbitrary for the first triangle\n p.append(intersections[0][1])\n tid, intersections = get_next_triangle(mesh, tid, plane,\n intersections[0], dist_tol)\n\n # Loop until we have explored all the triangles for the current\n # polyline\n while tid in T:\n T.remove(tid)\n\n # get_next_triangle returns triangles that our plane actually\n # intersects (as opposed to touching only a single vertex),\n # hence the assert\n assert len(intersections) == 2\n # Of the two returned intersections, one should have the\n # intersection point equal to p[-1]\n if la.norm(intersections[0][1] - p[-1]) < dist_tol:\n intersect = intersections[1]\n else:\n assert la.norm(intersections[1][1] - p[-1]) < dist_tol, \\\n '%s not close to %s' % (str(p[-1]), str(intersections))\n intersect = intersections[0]\n\n p.append(intersect[1])\n tid, intersections = get_next_triangle(mesh, tid, plane,\n intersect, dist_tol)\n\n if tid is None:\n print('Degenerate case (probably non-closed mesh)')\n break\n\n P.append(np.array(p))\n\n return P\n\n\ndef cross_section(verts, tris, plane_orig, plane_normal, **kwargs):\n \"\"\"\n Compute the planar cross section of a mesh. This returns a set of\n polylines.\n\n Args:\n verts: Nx3 array of the vertices position\n faces: Nx3 array of the faces, containing vertex indices\n plane_orig: 3-vector indicating the plane origin\n plane_normal: 3-vector indicating the plane normal\n\n Returns:\n A list of Nx3 arrays, each representing a disconnected portion\n of the cross section as a polyline\n \"\"\"\n mesh = TriangleMesh(verts, tris)\n plane = Plane(plane_orig, plane_normal)\n return cross_section_mesh(mesh, plane, **kwargs)\n" ]
[ [ "numpy.array", "numpy.dot", "numpy.linalg.norm", "numpy.sign", "numpy.fabs" ] ]
esgyu/AI_PROJECT_SERVER
[ "b0c1d1ac44bd88d5a32920065bfbc3844c649fbc" ]
[ "src/friends_dataprep.py" ]
[ "'''\nThis Program is a Face Identification model,\nI built to detect my friends faces. The system uses\nMTCNN to detect faces, then we use the FaceNet model\nand an SVM to predict the identity of the faces\n'''\nfrom os import listdir\nfrom os.path import isdir\nfrom matplotlib import pyplot\nfrom keras.models import load_model\nimport numpy as np\nfrom PIL import Image\nfrom mtcnn.mtcnn import MTCNN\nmodel = load_model('facenet_keras.h5')\nprint(model.inputs)\nprint(model.outputs)\n\n\ndef extract_face(filename, req_size=(160, 160)):\n # load image from file\n image = Image.open(filename)\n image = image.convert('RGB')\n # convert to array\n pixels = pyplot.imread(filename)\n # create the detector, using default weights\n detector = MTCNN()\n # detect faces in the image\n results = detector.detect_faces(pixels)\n\n print(filename)\n if results == []:\n return np.zeros((160, 160, 3))\n # extract the bounding box from the first face\n x1, y1, width, height = results[0]['box']\n # take abs value to avoid negatives\n x1, y1 = abs(x1), abs(y1)\n x2, y2 = x1 + width, y1 + height\n # extract the face\n face = pixels[y1:y2, x1:x2]\n # resize the face to required size by model\n image = Image.fromarray(face)\n image = image.resize((160, 160))\n print(\"image before np\", image)\n face_array = np.asarray(image)\n print(\"extract face produces\", face_array.shape)\n return face_array\n\n\n#plot all the faces in the images in this directory\ndef plot_images(folder, plot_h, plot_w):\n i = 1\n # enumerate files\n for filename in listdir(folder):\n # add image file to path\n path = folder + filename\n # call get face\n face = extract_face(path)\n if face != []:\n print(i, face.shape)\n # plot\n pyplot.subplot(plot_h, plot_w, i)\n pyplot.axis('off')\n pyplot.imshow(face)\n i += 1\n pyplot.show()\n\n#load all the faces from images in this directory\ndef load_faces(direc):\n faces = list()\n #enumerate files\n for filename in listdir(direc):\n # add image file to path\n path = direc + filename\n # call get face\n face = extract_face(path)\n faces.append(face)\n \n return faces\n\n#To run over train and val directories\ndef load_dataset(direc):\n x, y = list(), list()\n #for every class directory in this train/val directory\n for subdir in listdir(direc):\n\n path = direc + subdir + '/'\n #if it is a file and not a dir then skip\n if not isdir(path):\n continue\n #load all faces in the class directory (subdir)\n faces = load_faces(path)\n #create labels\n labels = [subdir for i in range(len(faces))]\n #summarize progress\n print('loaded %d examples for class: %s' %(len(faces), subdir))\n print(faces)\n x.extend(faces)\n y.extend(labels)\n return np.asarray(x), np.asarray(y)\n\ntrainX, trainy = load_dataset('FriendsDataset/train/')\nprint(trainX.shape, trainy.shape)\nvalX, valy = load_dataset('FriendsDataset/val/')\nprint(trainX.shape, trainy.shape)\nnp.savez_compressed('FriendsDataset.npz', trainX, trainy, valX, valy)" ]
[ [ "numpy.asarray", "numpy.zeros", "numpy.savez_compressed", "matplotlib.pyplot.axis", "matplotlib.pyplot.show", "matplotlib.pyplot.imshow", "matplotlib.pyplot.imread", "matplotlib.pyplot.subplot" ] ]
profxj/mwh_analysis
[ "bb3854ee9e6deee7be270967fe5a7e15a7636819" ]
[ "mhw_analysis/scripts/show_system.py" ]
[ "\"\"\" Script to show an MHW System\"\"\"\n\nfrom IPython import embed\n\ndef parser(options=None):\n import argparse\n # Parse\n parser = argparse.ArgumentParser(description='Show an MHW System')\n parser.add_argument(\"dataset\", type=str, help=\"MHW System set: orig, vary\")\n parser.add_argument(\"plot_type\", type=str, help=\"Plot type: first_day\")\n parser.add_argument(\"-m\", \"--maskid\", type=int, help=\"Mask Id\")\n\n if options is None:\n pargs = parser.parse_args()\n else:\n pargs = parser.parse_args(options)\n return pargs\n\n\ndef main(pargs):\n \"\"\" Run\n \"\"\"\n import numpy as np\n import os\n import warnings\n import datetime\n from matplotlib import pyplot as plt\n\n import iris\n import iris.plot as iplot\n\n from oceanpy.sst import utils as sst_utils\n from mhw_analysis.systems import io as mhw_sys_io\n\n import cartopy.crs as ccrs\n from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER\n\n vary = False\n if pargs.dataset == 'orig':\n vary=False\n elif pargs.dataset == 'vary':\n vary=True\n else:\n raise IOError(\"Bad flavor!\")\n\n # Load the systems\n mhw_systems = mhw_sys_io.load_systems(vary=vary)\n\n # Grab the system\n if pargs.maskid is not None:\n idx = np.where(mhw_systems.mask_Id == pargs.maskid)[0][0]\n mhw_system = mhw_systems.iloc[idx]\n else:\n raise IOError(\"Must use --maskid for now\")\n\n # Date\n start_date = datetime.date.fromordinal(datetime.date(1982,1,1).toordinal() + mhw_system.zboxmin)\n print(\"Start date: {}\".format(start_date))\n\n # Grab the mask (this can get big!)\n mask_cube = mhw_sys_io.load_mask_from_system(mhw_system, vary=vary)\n\n # Plot\n if pargs.plot_type == 'first_day':\n sys_idx = mask_cube.data[:] == mhw_system.mask_Id\n mask_cube.data[np.logical_not(sys_idx)] = 0\n mask_cube.data[sys_idx] = 1\n # Date\n for kk in range(mask_cube.data.shape[2]):\n mask_cube.data[:, :, kk] *= kk + 1\n #\n mask_cube.data[mask_cube.data == 0] = 9999999\n tstart = np.min(mask_cube.data, axis=2).astype(float)\n tstart[tstart == 9999999] = np.nan\n # Cube me\n lat_coord, lon_coord = sst_utils.noaa_oi_coords(as_iris_coord=True)\n tstart_cube = iris.cube.Cube(tstart, var_name='tstart',\n dim_coords_and_dims=[(lat_coord, 0),\n (lon_coord, 1)])\n # Plot me\n # First day\n fig = plt.figure(figsize=(10, 6))\n plt.clf()\n\n proj = ccrs.PlateCarree(central_longitude=-180.0)\n ax = plt.gca(projection=proj)\n\n # Pacific events\n # Draw the contour with 25 levels.\n cm = plt.get_cmap('rainbow')\n\n cplt = iplot.contourf(tstart_cube, 20, cmap=cm) # , vmin=0, vmax=20)#, 5)\n cb = plt.colorbar(cplt, fraction=0.020, pad=0.04)\n cb.set_label('t_start (Days since )')\n\n # Add coastlines to the map created by contourf.\n plt.gca().coastlines()\n\n # Gridlines\n # https://stackoverflow.com/questions/49956355/adding-gridlines-using-cartopy\n gl = ax.gridlines(crs=ccrs.PlateCarree(), linewidth=2, color='black', alpha=0.5,\n linestyle='--', draw_labels=True)\n gl.xlabels_top = False\n gl.ylabels_left = True\n gl.ylabels_right = False\n gl.xlines = True\n gl.xformatter = LONGITUDE_FORMATTER\n gl.yformatter = LATITUDE_FORMATTER\n gl.xlabel_style = {'color': 'black', 'weight': 'bold'}\n gl.ylabel_style = {'color': 'black', 'weight': 'bold'}\n # gl.xlocator = mticker.FixedLocator([-180., -170., -160, -150., -140, -120, -60, -20.])\n # gl.ylocator = mticker.FixedLocator([30., 40., 50., 60.])\n\n plt.show()\n\n\n\n\n" ]
[ [ "numpy.logical_not", "matplotlib.pyplot.colorbar", "matplotlib.pyplot.get_cmap", "numpy.min", "matplotlib.pyplot.figure", "numpy.where", "matplotlib.pyplot.show", "matplotlib.pyplot.clf", "matplotlib.pyplot.gca" ] ]
allrod5/extra-trees
[ "ba7d94bd8a77daaa2357241f343c3b571a900131" ]
[ "tests/tree/test_tree.py" ]
[ "import numpy as np\n\nfrom extra_trees.tree.tree import ExtraTreeClassifier\nfrom extra_trees.tree.tree import ExtraTreeRegressor\n\n\ndef test_extra_tree_regressor(circles):\n X, y = circles\n indices = np.random.permutation(len(X.data))\n X_train = X[indices[:-10]]\n y_train = y[indices[:-10]]\n X_test = X[indices[-10:]]\n y_test = y[indices[-10:]]\n\n regressor = ExtraTreeRegressor()\n regressor.fit(X_train, y_train)\n predictions = regressor.predict(X_test)\n\n assert len(predictions) == len(y_test)\n\n\ndef test_extra_tree_classifier(circles):\n X, y = circles\n indices = np.random.permutation(len(X.data))\n X_train = X[indices[:-10]]\n y_train = y[indices[:-10]]\n X_test = X[indices[-10:]]\n y_test = y[indices[-10:]]\n\n classifier = ExtraTreeClassifier()\n classifier.fit(X_train, y_train)\n predictions = classifier.predict(X_test)\n\n assert len(predictions) == len(y_test)\n\n all_classes = np.unique(y)\n predicted_classes = np.unique(predictions)\n assert all(value in all_classes for value in predicted_classes)\n" ]
[ [ "numpy.unique" ] ]
arshadzahangirchowdhury/TomoEncoders
[ "9c2b15fd515d864079f198546821faee5d78df17" ]
[ "scratchpad/tomostream_roi/tomostream/tomostream3d.py" ]
[ "\n'''\nAdaptation of Tomostream orthoslice code for doing full 3d reconstructions\nThen to apply DL-based image processing or computer vision steps. \n\n\n'''\n\n\nimport pvaccess as pva\nimport numpy as np\nimport queue\nimport time\nimport h5py\nimport threading\nimport signal\n\nimport util\nimport log\nfrom epics import PV\nimport solver3d as solver\nfrom roi_utils.roi import load_seg_nn\n\n#CHECK#\n#to-do: Zliu: add path to model file here\ntorch_model_path = '/home/beams/TOMO/gas_hydrates_3dzoom_Dec2021/models/mdl-ep00230.pth'\ntorch_model_ADet = '/home/beams/TOMO/gas_hydrates_3dzoom_Dec2021/models/mdl-ep0200.pth'\n\nclass EncoderStream():\n \"\"\" Class for streaming reconstuction of ortho-slices on a machine with GPU.\n The class creates and broadcasts a pva type pv for concatenated reconstructions \n of (x,y,z) ortho-slices. Reconstructons are done by the FBP formula \n with direct discretization of the circular integral.\n Projection data is taken from the detector pv (pva type channel) \n and stored in a queue, dark and flat fields are taken from the pv broadcasted \n by the server on the detector machine (see tomoscan_stream.py from Tomoscan package).\n \n Parameters\n ----------\n args : dict\n Dictionary of pv variables.\n \"\"\"\n\n def __init__(self, pv_files, macros):\n \n log.setup_custom_logger(\"./encoderstream.log\")\n\n # init pvs\n self.config_pvs = {}\n self.control_pvs = {}\n self.pv_prefixes = {}\n\n\n if not isinstance(pv_files, list):\n pv_files = [pv_files]\n for pv_file in pv_files:\n self.read_pv_file(pv_file, macros)\n self.show_pvs()\n self.epics_pvs = {**self.config_pvs, **self.control_pvs}\n \n \n prefix = self.pv_prefixes['TomoScan']\n # tomoscan pvs\n self.epics_pvs['FrameType'] = PV(prefix + 'FrameType')\n self.epics_pvs['NumAngles'] = PV(prefix + 'NumAngles')\n \n self.epics_pvs['RotationStep'] = PV(prefix + 'RotationStep')\n \n # Replace PSOPVPrefix to link to check a TomoScanStream PV so it returns if scan IOC is down\n # self.epics_pvs['PSOPVPrefix'] = PV(prefix + 'PSOPVPrefix')\n # if self.epics_pvs['PSOPVPrefix'].get(as_string=True) == None:\n # log.error(\"TomoScan is down\")\n # log.error(\"Type exit() here and start TomoScan first\")\n # return\n \n # pva type channel for flat and dark fields pv broadcasted from the detector machine\n self.epics_pvs['PvaDark'] = pva.Channel(self.epics_pvs['DarkPVAName'].get())\n self.pva_dark = self.epics_pvs['PvaDark']\n self.epics_pvs['PvaFlat'] = pva.Channel(self.epics_pvs['FlatPVAName'].get())\n self.pva_flat = self.epics_pvs['PvaFlat'] \n self.epics_pvs['PvaTheta'] = pva.Channel(self.epics_pvs['ThetaPVAName'].get())\n self.pva_theta = self.epics_pvs['PvaTheta'] \n \n # pva type channel that contains projection and metadata\n image_pv_name = PV(self.epics_pvs['ImagePVAPName'].get()).get()\n self.epics_pvs['PvaPImage'] = pva.Channel(image_pv_name + 'Image')\n self.epics_pvs['PvaPDataType_RBV'] = pva.Channel(image_pv_name + 'DataType_RBV')\n self.pva_plugin_image = self.epics_pvs['PvaPImage']\n \n # create pva type pv for reconstrucion by copying metadata from the data pv, but replacing the sizes\n # This way the ADViewer (NDViewer) plugin can be also used for visualizing reconstructions.\n pva_image_data = self.pva_plugin_image.get('')\n pva_image_dict = pva_image_data.getStructureDict() \n self.pv_rec = pva.PvObject(pva_image_dict)\n \n # run server for reconstruction pv\n recon_pva_name = self.epics_pvs['ReconPVAName'].get()\n self.server_rec = pva.PvaServer(recon_pva_name, self.pv_rec)\n\n 
self.epics_pvs['StartRecon'].put('Done')\n self.epics_pvs['AbortRecon'].put('Yes')\n \n self.epics_pvs['StartRecon'].add_callback(self.pv_callback)\n self.epics_pvs['AbortRecon'].add_callback(self.pv_callback)\n self.slv = None\n \n # Set ^C, ^Z interrupt to abort the stream reconstruction\n signal.signal(signal.SIGINT, self.signal_handler)\n signal.signal(signal.SIGTSTP, self.signal_handler)\n\n\n # Start the watchdog timer thread\n thread = threading.Thread(target=self.reset_watchdog, args=(), daemon=True)\n thread.start()\n \n def pv_callback(self, pvname=None, value=None, char_value=None, **kw):\n \"\"\"Callback function that is called by pyEpics when certain EPICS PVs are changed\n\n The PVs that are handled are:\n\n - ``StartScan`` : Calls ``run_fly_scan()``\n\n - ``AbortScan`` : Calls ``abort_scan()``\n \n \"\"\"\n log.debug('pv_callback pvName=%s, value=%s, char_value=%s', pvname, value, char_value) \n if (pvname.find('StartRecon') != -1) and (value == 1):\n thread = threading.Thread(target=self.begin_stream, args=())\n thread.start() \n elif (pvname.find('AbortRecon') != -1) and (value == 0):\n thread = threading.Thread(target=self.abort_stream, args=())\n thread.start() \n\n def signal_handler(self, sig, frame):\n \"\"\"Calls abort_scan when ^C or ^Z is typed\"\"\"\n if (sig == signal.SIGINT) or (sig == signal.SIGTSTP):\n self.abort_stream() \n\n def reset_watchdog(self):\n \"\"\"Sets the watchdog timer to 5 every 3 seconds\"\"\"\n while True:\n self.epics_pvs['Watchdog'].put(5)\n time.sleep(3) \n \n def reinit_monitors(self):\n \"\"\"Reinit pv monitoring functions with updating data sizes\"\"\"\n\n log.warning('reinit monitors with updating data sizes')\n # stop monitors\n self.pva_dark.stopMonitor()\n self.pva_flat.stopMonitor()\n self.pva_plugin_image.stopMonitor() \n while(self.pva_dark.isMonitorActive() or \n self.pva_flat.isMonitorActive() or\n self.pva_plugin_image.isMonitorActive()):\n time.sleep(0.01)\n time.sleep(0.5)# need to wait for some reason? 
to check\n # take new data sizes\n pva_image_data = self.pva_plugin_image.get('')\n width = pva_image_data['dimension'][0]['size']\n height = pva_image_data['dimension'][1]['size']\n self.pv_rec['dimension'] = [{'size': 3*width, 'fullSize': 3*width, 'binning': 1},\n {'size': width, 'fullSize': width, 'binning': 1}]\n # self.theta = self.epics_pvs['ThetaArray'].get()[:self.epics_pvs['NumAngles'].get()] \n self.theta = self.pva_theta.get()['value']\n print(self.theta)\n #exit()\n # update limits on sliders\n # epics_pvs['OrthoXlimit'].put(width-1)\n # epics_pvs['OrthoYlimit'].put(width-1)\n # epics_pvs['OrthoZlimit'].put(height-1) \n \n ## create a queue to store projections\n # find max size of the queue, the size is equal to the number of angles in the interval of size pi\n if(max(self.theta)<180):\n buffer_size = len(self.theta)\n else: \n dtheta = self.theta[1]-self.theta[0]\n buffer_size = np.where(self.theta-self.theta[0]>180-dtheta)[0][0]\n if(buffer_size*width*height>pow(2,32)):\n log.error('buffer_size %s not enough memory', buffer_size)\n exit(0) \n # queue\n self.data_queue = queue.Queue(maxsize=buffer_size)\n# self.recon_queue = queue.Queue(maxsize=1)\n # take datatype \n datatype_list = self.epics_pvs['PvaPDataType_RBV'].get()['value'] \n self.datatype = datatype_list['choices'][datatype_list['index']].lower() \n \n # update parameters from in the GUI\n center = self.epics_pvs['Center'].get()\n idx = self.epics_pvs['OrthoX'].get()\n idy = self.epics_pvs['OrthoY'].get()\n idz = self.epics_pvs['OrthoZ'].get()\n rotx = self.epics_pvs['RotX'].get()\n roty = self.epics_pvs['RotY'].get()\n rotz = self.epics_pvs['RotZ'].get()\n fbpfilter = self.epics_pvs['FilterType'].get(as_string=True) \n dezinger = self.epics_pvs['Dezinger'].get(as_string=False) \n \n if hasattr(self,'width'): # update parameters for new sizes \n self.epics_pvs['Center'].put(center*width/self.width)\n self.epics_pvs['OrthoX'].put(int(idx*width/self.width))\n self.epics_pvs['OrthoY'].put(int(idy*width/self.width))\n self.epics_pvs['OrthoZ'].put(int(idz*width/self.width))\n\n ## create solver class on GPU \n self.slv = solver.Solver(buffer_size, width, height, \n center, idx, idy, idz, rotx, roty, rotz, fbpfilter, dezinger, self.datatype)\n\n self.slv.dn_model = load_seg_nn(torch_model_path)\n \n # temp buffers for storing data taken from the queue\n self.proj_buffer = np.zeros([buffer_size, width*height], dtype=self.datatype)\n self.theta_buffer = np.zeros(buffer_size, dtype='float32')\n self.ids_buffer = np.zeros(buffer_size, dtype='int32')\n\n self.width = width\n self.height = height\n self.buffer_size = buffer_size\n \n ## start PV monitoring\n # start monitoring dark and flat fields pv\n self.pva_dark.monitor(self.add_dark,'')\n self.pva_flat.monitor(self.add_flat,'') \n # start monitoring projection data \n self.pva_plugin_image.monitor(self.add_data,'')\n self.stream_is_running = True\n\n def add_data(self, pv):\n \"\"\"PV monitoring function for adding projection data and corresponding angle to the queue\"\"\"\n frame_type = self.epics_pvs['FrameType'].get(as_string=True)\n if(self.stream_is_running and self.epics_pvs['FrameType'].get(as_string=True) == 'Projection'):\n cur_id = pv['uniqueId'] # unique projection id for determining angles and places in the buffers \n # write projection, theta, and id into the queue\n data_item = {'projection': pv['value'][0][util.type_dict[self.datatype]],\n 'theta': self.theta[min(cur_id,len(self.theta)-1)],\n 'id': np.mod(cur_id, self.buffer_size)\n }\n if(not 
self.data_queue.full()):\n self.data_queue.put(data_item)\n else:\n log.warning(\"queue is full, skip frame\")\n# pass\n# log.info('id: %s type %s queue size %s', cur_id, frame_type, self.data_queue.qsize())\n\n def add_dark(self, pv):\n \"\"\"PV monitoring function for reading new dark fields from manually running pv server \n on the detector machine\"\"\"\n \n if(self.stream_is_running and len(pv['value'])==self.width*self.height): # if pv with dark field has cocrrect sizes\n data = pv['value'].reshape(self.height, self.width)\n self.slv.set_dark(data)\n print('Norm dark', np.linalg.norm(data))\n log.error('new dark fields acquired')\n\n \n def add_flat(self, pv):\n \"\"\"PV monitoring function for reading new flat fields from manually running pv server \n on the detector machine\"\"\"\n\n if(self.stream_is_running and len(pv['value'])==self.width*self.height): # if pv with flat has correct sizes\n data = pv['value'].reshape(self.height, self.width)\n self.slv.set_flat(data)\n print('Norm flat', np.linalg.norm(data))\n log.error('new flat fields acquired')\n \n def begin_stream(self):\n \"\"\"Run streaming reconstruction by sending new incoming projections from the queue to the solver class,\n and broadcasting the reconstruction result to a pv variable\n \"\"\"\n \n self.reinit_monitors()\n self.epics_pvs['ReconStatus'].put('Running')\n \n \n while(self.stream_is_running):\n # take parameters from the GUI \n center = self.epics_pvs['Center'].get()\n idx = self.epics_pvs['OrthoX'].get()\n idy = self.epics_pvs['OrthoY'].get()\n idz = self.epics_pvs['OrthoZ'].get()\n rotx = self.epics_pvs['RotX'].get()\n roty = self.epics_pvs['RotY'].get()\n rotz = self.epics_pvs['RotZ'].get()\n fbpfilter = self.epics_pvs['FilterType'].get(as_string=True)\n dezinger = self.epics_pvs['Dezinger'].get(as_string=False)\n # take items from the queue\n nitem = 0\n \n while ((not self.data_queue.empty()) and (nitem < self.buffer_size)):\n item = self.data_queue.get()\n # reinit if data sizes were updated (e.g. 
after data binning by ROI1)\n if(len(item['projection'])!=self.width*self.height):\n self.reinit_monitors()\n\n self.proj_buffer[nitem] = item['projection']\n self.theta_buffer[nitem] = item['theta']\n self.ids_buffer[nitem] = item['id'] \n nitem += 1\n \n if(nitem == 0):\n continue\n\n \n# log.info('center %s: idx, idy, idz: %s %s %s, rotx, roty, rotz: %s %s %s, filter: %s, dezinger: %s', center, idx, idy, idz, rotx, roty, rotz, fbpfilter, dezinger)\n \n # reconstruct on GPU\n util.tic()\n# log.info(\"DATA SHAPE: %s\"%str(self.proj_buffer[:nitem].shape))\n\n rec = self.slv.recon_optimized(\n self.proj_buffer[:nitem], self.theta_buffer[:nitem], self.ids_buffer[:nitem], center, idx, idy, idz, rotx, roty, rotz, fbpfilter, dezinger)\n self.epics_pvs['ReconTime'].put(util.toc())\n self.epics_pvs['BufferSize'].put(f'{nitem}/{self.buffer_size}') \n # write result to pv\n idz, idy, idx = self.slv.roi_pt\n rec[0:self.width,idx:idx+3] = np.nan\n rec[idy:idy+3,0:self.width] = np.nan\n rec[0:self.width,self.width+idx:self.width+idx+3] = np.nan\n rec[idz:idz+3,self.width:2*self.width] = np.nan\n rec[0:self.width,2*self.width+idy:2*self.width+idy+3] = np.nan\n rec[idz:idz+3,2*self.width:3*self.width] = np.nan\n self.pv_rec['value'] = ({'floatValue': rec.flatten()},) \n \n self.epics_pvs['OrthoX'].put(idx)\n self.epics_pvs['OrthoY'].put(idy)\n self.epics_pvs['OrthoZ'].put(idz)\n \n \n self.epics_pvs['StartRecon'].put('Done') \n self.epics_pvs['ReconStatus'].put('Stopped')\n \n def abort_stream(self):\n \"\"\"Aborts streaming that is running.\n \"\"\"\n self.epics_pvs['ReconStatus'].put('Aborting reconstruction')\n if(self.slv is not None):\n self.slv.free()\n self.stream_is_running = False\n\n def read_pv_file(self, pv_file_name, macros):\n \"\"\"Reads a file containing a list of EPICS PVs to be used by TomoScan.\n\n\n Parameters\n ----------\n pv_file_name : str\n Name of the file to read\n macros: dict\n Dictionary of macro substitution to perform when reading the file\n \"\"\"\n\n pv_file = open(pv_file_name)\n lines = pv_file.read()\n pv_file.close()\n lines = lines.splitlines()\n for line in lines:\n is_config_pv = True\n if line.find('#controlPV') != -1:\n line = line.replace('#controlPV', '')\n is_config_pv = False\n line = line.lstrip()\n # Skip lines starting with #\n if line.startswith('#'):\n continue\n # Skip blank lines\n if line == '':\n continue\n pvname = line\n # Do macro substitution on the pvName\n for key in macros:\n pvname = pvname.replace(key, macros[key])\n # Replace macros in dictionary key with nothing\n dictentry = line\n for key in macros:\n dictentry = dictentry.replace(key, '')\n epics_pv = PV(pvname)\n\n if is_config_pv:\n self.config_pvs[dictentry] = epics_pv\n else:\n self.control_pvs[dictentry] = epics_pv\n # if dictentry.find('PVAPName') != -1:\n # pvname = epics_pv.value\n # key = dictentry.replace('PVAPName', '')\n # self.control_pvs[key] = PV(pvname)\n if dictentry.find('PVName') != -1:\n pvname = epics_pv.value\n key = dictentry.replace('PVName', '')\n self.control_pvs[key] = PV(pvname)\n if dictentry.find('PVPrefix') != -1:\n pvprefix = epics_pv.value\n key = dictentry.replace('PVPrefix', '')\n self.pv_prefixes[key] = pvprefix\n \n def show_pvs(self):\n \"\"\"Prints the current values of all EPICS PVs in use.\n\n The values are printed in three sections:\n\n - config_pvs : The PVs that are part of the scan configuration and\n are saved by save_configuration()\n\n - control_pvs : The PVs that are used for EPICS control and status,\n but are not saved by 
save_configuration()\n\n - pv_prefixes : The prefixes for PVs that are used for the areaDetector camera,\n file plugin, etc.\n \"\"\"\n\n print('configPVS:')\n for config_pv in self.config_pvs:\n print(config_pv, ':', self.config_pvs[config_pv].get(as_string=True))\n\n print('')\n print('controlPVS:')\n for control_pv in self.control_pvs:\n print(control_pv, ':', self.control_pvs[control_pv].get(as_string=True))\n\n print('')\n print('pv_prefixes:')\n for pv_prefix in self.pv_prefixes:\n print(pv_prefix, ':', self.pv_prefixes[pv_prefix])\n" ]
[ [ "numpy.where", "numpy.linalg.norm", "numpy.mod", "numpy.zeros" ] ]
vivekmathema/peakonly
[ "4dfbd053dc14289b790b1320ea6e02db85a6a5d2" ]
[ "run_batch.py" ]
[ "import sys\nimport os\nimport torch\nfrom tqdm import tqdm\nfrom utils.roi import get_ROIs\nfrom utils.models import Classifier, Integrator\nfrom utils.matching import construct_mzregions, rt_grouping, align_component\nfrom utils.run_utils import find_mzML, classifier_prediction, border_prediction,\\\n correct_classification, border_correction, build_features, feature_collapsing\nfrom utils.postprocess import ResultTable\n\nif __name__ == '__main__':\n if len(sys.argv) != 5:\n print('''Run script in the following format:\n python3 run.py path_to_file delta_mz roi_minimum_points peak_minimum_points''')\n exit()\n path = sys.argv[1]\n delta_mz = float(sys.argv[2])\n required_points = int(sys.argv[3])\n peak_minimum_points = int(sys.argv[4])\n\n ### ROI detection ###\n # search .mzML files in directory\n files = find_mzML(path)\n print('In the directory {} found {} files'.format(path, len(files)))\n # get ROIs for every file\n ROIs = {}\n for file in files:\n ROIs[file] = get_ROIs(file, delta_mz, required_points)\n\n ### ROI alignment ###\n # construct mz regions\n mzregions = construct_mzregions(ROIs, delta_mz)\n # group ROIs in mz regions based on RT\n components = rt_grouping(mzregions)\n # component alignment\n aligned_components = []\n for component in components:\n aligned_components.append(align_component(component))\n\n ### Classification, integration and correction ###\n # load models\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n classify = Classifier()\n classify.load_state_dict(torch.load('data/Classifier', map_location=device))\n classify.to(device)\n classify.eval()\n integrate = Integrator()\n integrate.load_state_dict(torch.load('data/Integrator', map_location=device))\n integrate.to(device)\n integrate.eval()\n # run through components\n component_number = 0\n features = []\n for component in tqdm(aligned_components):\n # predict labels and correct them\n labels = {}\n for sample, roi in zip(component.samples, component.rois):\n labels[sample] = classifier_prediction(roi, classify, device)\n correct_classification(labels)\n # predict borders and correct them\n borders = {}\n to_delete = []\n for j, (sample, roi) in enumerate(zip(component.samples, component.rois)):\n if labels[sample] == 1:\n border = border_prediction(roi, integrate, device, peak_minimum_points)\n if len(border) == 0: # if no border were predicted\n to_delete.append(j)\n else:\n borders[sample] = border\n else:\n to_delete.append(j)\n\n if len(borders) > len(files) // 3: # enough rois contain a peak\n component.pop(to_delete) # delete ROIs which don't contain peaks\n border_correction(component, borders)\n features.extend(build_features(component, borders, component_number))\n component_number += 1\n features = feature_collapsing(features)\n print('total number of features: {}'.format(len(features)))\n # explicitly delete features which were found in not enough quantity of ROIs\n to_delete = []\n for i, feature in enumerate(features):\n if len(feature) <= len(files) // 3: # to do: adjustable parameter\n to_delete.append(i)\n for j in to_delete[::-1]:\n features.pop(j)\n print('total number of features: {}'.format(len(features)))\n\n ### Save all features to csv file (zero filling is missing now) ###\n table = ResultTable(files, features)\n table.fill_zeros(delta_mz)\n table.to_csv(os.path.join(path, 'resultTable.csv'))\n" ]
[ [ "torch.cuda.is_available", "torch.load" ] ]
kant/DL_Segmention_Template
[ "cd791d79fefb1f9a7ee4fbd691c09f1f23180a9a" ]
[ "experiments/data_loaders/standard_loader.py" ]
[ "\"\"\"\nCopyright (c) 2018. All rights reserved.\nCreated by Resnick Xing on 2018/5/10\n\"\"\"\nimport glob,cv2,numpy as np\nimport matplotlib.pyplot as plt\nfrom perception.bases.data_loader_base import DataLoaderBase\nfrom configs.utils.utils import write_hdf5,load_hdf5\n\nclass DataLoader(DataLoaderBase):\n\tdef __init__(self, config=None):\n\t\tsuper(DataLoader, self).__init__(config)\n\t\t# 路径(data_path)、图片类型(img_type)\n\t\tself.train_img_path=config.train_img_path\n\t\tself.train_groundtruth_path = config.train_groundtruth_path\n\t\tself.train_type=config.train_datatype\n\t\tself.val_img_path=config.val_img_path\n\t\tself.val_groundtruth_path=config.val_groundtruth_path\n\t\tself.val_type = config.val_datatype\n\n\t\t# 实验名称(exp_name)\n\t\tself.exp_name=config.exp_name\n\t\tself.hdf5_path=config.hdf5_path\n\t\tself.height=config.height\n\t\tself.width=config.width\n\t\tself.num_seg_class=config.seg_num\n\n\tdef _access_dataset(self,origin_path,groundtruth_path,datatype):\n\t\t\"\"\"\n\n\t\t:param origin_path: 原始图片路径(path for original image)\n\t\t:param groundtruth_path: GT图片路径(path for groundtruth image)\n\t\t:param datatype: 图片格式(dataType for origin and gt)\n\t\t:return: 张量类型(Tensor) imgs, groundTruth\n\t\t\"\"\"\n\t\torgList = glob.glob(origin_path+\"*.\"+datatype) #文件名列表 filename list\n\t\tgtList = glob.glob(groundtruth_path+\"*.\"+datatype)\n\n\t\tassert (len(orgList) == len(gtList)) # 原始图片和GT图片数量应当一致\n\n\t\timgs = np.empty((len(orgList), self.height, self.width, 3))\n\t\tgroundTruth = np.empty((len(gtList), self.num_seg_class, self.height, self.width))\n\n\t\tfor index in range(len(orgList)):\n\t\t\torgPath=orgList[index]\n\t\t\torgImg=plt.imread(orgPath)\n\t\t\timgs[index]=np.asarray(orgImg)\n\n\t\t\tfor no_seg in range(self.num_seg_class):\n\t\t\t\tgtPath=gtList[index]\n\t\t\t\tgtImg=plt.imread(gtPath,0)\n\t\t\t\tgroundTruth[index,no_seg]=np.asarray(gtImg)\n\t\tprint(\"[INFO] 读取数据...\")\n\t\tassert (np.max(groundTruth) == 255)\n\t\tassert (np.min(groundTruth) == 0)\n\t\treturn imgs,groundTruth\n\n\n\n\tdef prepare_dataset(self):\n\n\t\t# 训练图片汇成HDF5合集 preapare train_img/groundtruth.hdf5\n\t\timgs_train, groundTruth=self._access_dataset(self.train_img_path,self.train_groundtruth_path,self.train_type)\n\t\twrite_hdf5(imgs_train,self.hdf5_path+\"/train_img.hdf5\")\n\t\twrite_hdf5(groundTruth, self.hdf5_path+\"/train_groundtruth.hdf5\")\n\t\tprint(\"[INFO] 保存训练数据\")\n\t\t# 测试图片汇成HDF5合集 preapare val_img/groundtruth.hdf5\n\t\timgs_val, groundTruth = self._access_dataset(self.val_img_path, self.val_groundtruth_path, self.val_type)\n\t\twrite_hdf5(imgs_val, self.hdf5_path + \"/val_img.hdf5\")\n\t\twrite_hdf5(groundTruth, self.hdf5_path + \"/val_groundtruth.hdf5\")\n\t\tprint(\"[INFO] 保存验证数据\")\n\n\tdef get_train_data(self):\n\t\timgs_train=load_hdf5(self.hdf5_path+\"/train_img.hdf5\")\n\t\tgroundTruth=load_hdf5(self.hdf5_path+\"/train_groundtruth.hdf5\")\n\t\treturn imgs_train,groundTruth\n\n\tdef get_val_data(self):\n\t\timgs_val=load_hdf5(self.hdf5_path+\"/val_img.hdf5\")\n\t\tgroundTruth=load_hdf5(self.hdf5_path+\"/val_groundtruth.hdf5\")\n\t\treturn imgs_val,groundTruth" ]
[ [ "numpy.max", "numpy.min", "numpy.asarray", "matplotlib.pyplot.imread" ] ]
AlexBruBuxo/TFG--ASC-Deep-Learning
[ "589ec7179e99ddb27b1f0a77e084d556db9fc75f" ]
[ "venv/Lib/site-packages/librosa/feature/spectral.py" ]
[ "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"Spectral feature extraction\"\"\"\n\nimport numpy as np\nimport scipy\nimport scipy.signal\nimport scipy.fftpack\n\nfrom .. import util\nfrom .. import filters\nfrom ..util.exceptions import ParameterError\nfrom ..util.decorators import moved\n\nfrom ..core.time_frequency import fft_frequencies\nfrom ..core.audio import zero_crossings, to_mono\nfrom ..core.spectrum import power_to_db, _spectrogram\nfrom ..core.constantq import cqt, hybrid_cqt\nfrom ..core.pitch import estimate_tuning\n\n\n__all__ = ['spectral_centroid',\n 'spectral_bandwidth',\n 'spectral_contrast',\n 'spectral_rolloff',\n 'spectral_flatness',\n 'poly_features',\n 'rms',\n 'rmse',\n 'zero_crossing_rate',\n 'chroma_stft',\n 'chroma_cqt',\n 'chroma_cens',\n 'melspectrogram',\n 'mfcc',\n 'tonnetz']\n\n\n# -- Spectral features -- #\ndef spectral_centroid(y=None, sr=22050, S=None, n_fft=2048, hop_length=512,\n freq=None):\n '''Compute the spectral centroid.\n\n Each frame of a magnitude spectrogram is normalized and treated as a\n distribution over frequency bins, from which the mean (centroid) is\n extracted per frame.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n sr : number > 0 [scalar]\n audio sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)] or None\n (optional) spectrogram magnitude\n\n n_fft : int > 0 [scalar]\n FFT window size\n\n hop_length : int > 0 [scalar]\n hop length for STFT. See `librosa.core.stft` for details.\n\n freq : None or np.ndarray [shape=(d,) or shape=(d, t)]\n Center frequencies for spectrogram bins.\n If `None`, then FFT bin center frequencies are used.\n Otherwise, it can be a single array of `d` center frequencies,\n or a matrix of center frequencies as constructed by\n `librosa.core.ifgram`\n\n Returns\n -------\n centroid : np.ndarray [shape=(1, t)]\n centroid frequencies\n\n See Also\n --------\n librosa.core.stft\n Short-time Fourier Transform\n\n librosa.core.ifgram\n Instantaneous-frequency spectrogram\n\n Examples\n --------\n From time-series input:\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> cent = librosa.feature.spectral_centroid(y=y, sr=sr)\n >>> cent\n array([[ 4382.894, 626.588, ..., 5037.07 , 5413.398]])\n\n From spectrogram input:\n\n >>> S, phase = librosa.magphase(librosa.stft(y=y))\n >>> librosa.feature.spectral_centroid(S=S)\n array([[ 4382.894, 626.588, ..., 5037.07 , 5413.398]])\n\n Using variable bin center frequencies:\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> if_gram, D = librosa.ifgram(y)\n >>> librosa.feature.spectral_centroid(S=np.abs(D), freq=if_gram)\n array([[ 4420.719, 625.769, ..., 5011.86 , 5221.492]])\n\n Plot the result\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2, 1, 1)\n >>> plt.semilogy(cent.T, label='Spectral centroid')\n >>> plt.ylabel('Hz')\n >>> plt.xticks([])\n >>> plt.xlim([0, cent.shape[-1]])\n >>> plt.legend()\n >>> plt.subplot(2, 1, 2)\n >>> librosa.display.specshow(librosa.amplitude_to_db(S, ref=np.max),\n ... 
y_axis='log', x_axis='time')\n >>> plt.title('log Power spectrogram')\n >>> plt.tight_layout()\n '''\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length)\n\n if not np.isrealobj(S):\n raise ParameterError('Spectral centroid is only defined '\n 'with real-valued input')\n elif np.any(S < 0):\n raise ParameterError('Spectral centroid is only defined '\n 'with non-negative energies')\n\n # Compute the center frequencies of each bin\n if freq is None:\n freq = fft_frequencies(sr=sr, n_fft=n_fft)\n\n if freq.ndim == 1:\n freq = freq.reshape((-1, 1))\n\n # Column-normalize S\n return np.sum(freq * util.normalize(S, norm=1, axis=0),\n axis=0, keepdims=True)\n\n\ndef spectral_bandwidth(y=None, sr=22050, S=None, n_fft=2048, hop_length=512,\n freq=None, centroid=None, norm=True, p=2):\n '''Compute p'th-order spectral bandwidth:\n\n (sum_k S[k] * (freq[k] - centroid)**p)**(1/p)\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n sr : number > 0 [scalar]\n audio sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)] or None\n (optional) spectrogram magnitude\n\n n_fft : int > 0 [scalar]\n FFT window size\n\n hop_length : int > 0 [scalar]\n hop length for STFT. See `librosa.core.stft` for details.\n\n freq : None or np.ndarray [shape=(d,) or shape=(d, t)]\n Center frequencies for spectrogram bins.\n If `None`, then FFT bin center frequencies are used.\n Otherwise, it can be a single array of `d` center frequencies,\n or a matrix of center frequencies as constructed by\n `librosa.core.ifgram`\n\n centroid : None or np.ndarray [shape=(1, t)]\n pre-computed centroid frequencies\n\n norm : bool\n Normalize per-frame spectral energy (sum to one)\n\n p : float > 0\n Power to raise deviation from spectral centroid.\n\n\n Returns\n -------\n bandwidth : np.ndarray [shape=(1, t)]\n frequency bandwidth for each frame\n\n\n Examples\n --------\n From time-series input\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> spec_bw = librosa.feature.spectral_bandwidth(y=y, sr=sr)\n >>> spec_bw\n array([[ 3379.878, 1429.486, ..., 3235.214, 3080.148]])\n\n From spectrogram input\n\n >>> S, phase = librosa.magphase(librosa.stft(y=y))\n >>> librosa.feature.spectral_bandwidth(S=S)\n array([[ 3379.878, 1429.486, ..., 3235.214, 3080.148]])\n\n Using variable bin center frequencies\n\n >>> if_gram, D = librosa.ifgram(y)\n >>> librosa.feature.spectral_bandwidth(S=np.abs(D), freq=if_gram)\n array([[ 3380.011, 1429.11 , ..., 3235.22 , 3080.148]])\n\n Plot the result\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2, 1, 1)\n >>> plt.semilogy(spec_bw.T, label='Spectral bandwidth')\n >>> plt.ylabel('Hz')\n >>> plt.xticks([])\n >>> plt.xlim([0, spec_bw.shape[-1]])\n >>> plt.legend()\n >>> plt.subplot(2, 1, 2)\n >>> librosa.display.specshow(librosa.amplitude_to_db(S, ref=np.max),\n ... 
y_axis='log', x_axis='time')\n >>> plt.title('log Power spectrogram')\n >>> plt.tight_layout()\n\n '''\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length)\n\n if not np.isrealobj(S):\n raise ParameterError('Spectral bandwidth is only defined '\n 'with real-valued input')\n elif np.any(S < 0):\n raise ParameterError('Spectral bandwidth is only defined '\n 'with non-negative energies')\n\n if centroid is None:\n centroid = spectral_centroid(y=y, sr=sr, S=S,\n n_fft=n_fft,\n hop_length=hop_length,\n freq=freq)\n\n # Compute the center frequencies of each bin\n if freq is None:\n freq = fft_frequencies(sr=sr, n_fft=n_fft)\n\n if freq.ndim == 1:\n deviation = np.abs(np.subtract.outer(freq, centroid[0]))\n else:\n deviation = np.abs(freq - centroid[0])\n\n # Column-normalize S\n if norm:\n S = util.normalize(S, norm=1, axis=0)\n\n return np.sum(S * deviation**p, axis=0, keepdims=True)**(1./p)\n\n\ndef spectral_contrast(y=None, sr=22050, S=None, n_fft=2048, hop_length=512,\n freq=None, fmin=200.0, n_bands=6, quantile=0.02,\n linear=False):\n '''Compute spectral contrast [1]_\n\n .. [1] Jiang, Dan-Ning, Lie Lu, Hong-Jiang Zhang, Jian-Hua Tao,\n and Lian-Hong Cai.\n \"Music type classification by spectral contrast feature.\"\n In Multimedia and Expo, 2002. ICME'02. Proceedings.\n 2002 IEEE International Conference on, vol. 1, pp. 113-116.\n IEEE, 2002.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n sr : number > 0 [scalar]\n audio sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)] or None\n (optional) spectrogram magnitude\n\n n_fft : int > 0 [scalar]\n FFT window size\n\n hop_length : int > 0 [scalar]\n hop length for STFT. See `librosa.core.stft` for details.\n\n freq : None or np.ndarray [shape=(d,)]\n Center frequencies for spectrogram bins.\n If `None`, then FFT bin center frequencies are used.\n Otherwise, it can be a single array of `d` center frequencies.\n\n fmin : float > 0\n Frequency cutoff for the first bin `[0, fmin]`\n Subsequent bins will cover `[fmin, 2*fmin]`, `[2*fmin, 4*fmin]`, etc.\n\n n_bands : int > 1\n number of frequency bands\n\n quantile : float in (0, 1)\n quantile for determining peaks and valleys\n\n linear : bool\n If `True`, return the linear difference of magnitudes:\n `peaks - valleys`.\n\n If `False`, return the logarithmic difference:\n `log(peaks) - log(valleys)`.\n\n\n Returns\n -------\n contrast : np.ndarray [shape=(n_bands + 1, t)]\n each row of spectral contrast values corresponds to a given\n octave-based frequency\n\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> S = np.abs(librosa.stft(y))\n >>> contrast = librosa.feature.spectral_contrast(S=S, sr=sr)\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2, 1, 1)\n >>> librosa.display.specshow(librosa.amplitude_to_db(S,\n ... ref=np.max),\n ... 
y_axis='log')\n >>> plt.colorbar(format='%+2.0f dB')\n >>> plt.title('Power spectrogram')\n >>> plt.subplot(2, 1, 2)\n >>> librosa.display.specshow(contrast, x_axis='time')\n >>> plt.colorbar()\n >>> plt.ylabel('Frequency bands')\n >>> plt.title('Spectral contrast')\n >>> plt.tight_layout()\n '''\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length)\n\n # Compute the center frequencies of each bin\n if freq is None:\n freq = fft_frequencies(sr=sr, n_fft=n_fft)\n\n freq = np.atleast_1d(freq)\n\n if freq.ndim != 1 or len(freq) != S.shape[0]:\n raise ParameterError('freq.shape mismatch: expected '\n '({:d},)'.format(S.shape[0]))\n\n if n_bands < 1 or not isinstance(n_bands, int):\n raise ParameterError('n_bands must be a positive integer')\n\n if not 0.0 < quantile < 1.0:\n raise ParameterError('quantile must lie in the range (0, 1)')\n\n if fmin <= 0:\n raise ParameterError('fmin must be a positive number')\n\n octa = np.zeros(n_bands + 2)\n octa[1:] = fmin * (2.0**np.arange(0, n_bands + 1))\n\n if np.any(octa[:-1] >= 0.5 * sr):\n raise ParameterError('Frequency band exceeds Nyquist. '\n 'Reduce either fmin or n_bands.')\n\n valley = np.zeros((n_bands + 1, S.shape[1]))\n peak = np.zeros_like(valley)\n\n for k, (f_low, f_high) in enumerate(zip(octa[:-1], octa[1:])):\n current_band = np.logical_and(freq >= f_low, freq <= f_high)\n\n idx = np.flatnonzero(current_band)\n\n if k > 0:\n current_band[idx[0] - 1] = True\n\n if k == n_bands:\n current_band[idx[-1] + 1:] = True\n\n sub_band = S[current_band]\n\n if k < n_bands:\n sub_band = sub_band[:-1]\n\n # Always take at least one bin from each side\n idx = np.rint(quantile * np.sum(current_band))\n idx = int(np.maximum(idx, 1))\n\n sortedr = np.sort(sub_band, axis=0)\n\n valley[k] = np.mean(sortedr[:idx], axis=0)\n peak[k] = np.mean(sortedr[-idx:], axis=0)\n\n if linear:\n return peak - valley\n else:\n return power_to_db(peak) - power_to_db(valley)\n\n\ndef spectral_rolloff(y=None, sr=22050, S=None, n_fft=2048, hop_length=512,\n freq=None, roll_percent=0.85):\n '''Compute roll-off frequency.\n\n The roll-off frequency is defined for each frame as the center frequency\n for a spectrogram bin such that at least roll_percent (0.85 by default)\n of the energy of the spectrum in this frame is contained in this bin and\n the bins below. This can be used to, e.g., approximate the maximum (or\n minimum) frequency by setting roll_percent to a value close to 1 (or 0).\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n sr : number > 0 [scalar]\n audio sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)] or None\n (optional) spectrogram magnitude\n\n n_fft : int > 0 [scalar]\n FFT window size\n\n hop_length : int > 0 [scalar]\n hop length for STFT. See `librosa.core.stft` for details.\n\n freq : None or np.ndarray [shape=(d,) or shape=(d, t)]\n Center frequencies for spectrogram bins.\n If `None`, then FFT bin center frequencies are used.\n Otherwise, it can be a single array of `d` center frequencies,\n\n .. 
note:: `freq` is assumed to be sorted in increasing order\n\n roll_percent : float [0 < roll_percent < 1]\n Roll-off percentage.\n\n Returns\n -------\n rolloff : np.ndarray [shape=(1, t)]\n roll-off frequency for each frame\n\n\n Examples\n --------\n From time-series input\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> # Approximate maximum frequencies with roll_percent=0.85 (default)\n >>> rolloff = librosa.feature.spectral_rolloff(y=y, sr=sr)\n >>> rolloff\n array([[ 8376.416, 968.994, ..., 8925.513, 9108.545]])\n >>> # Approximate minimum frequencies with roll_percent=0.1\n >>> rolloff = librosa.feature.spectral_rolloff(y=y, sr=sr, roll_percent=0.1)\n >>> rolloff\n array([[ 75.36621094, 64.59960938, 64.59960938, ..., 75.36621094,\n 75.36621094, 64.59960938]])\n\n\n From spectrogram input\n\n >>> S, phase = librosa.magphase(librosa.stft(y))\n >>> librosa.feature.spectral_rolloff(S=S, sr=sr)\n array([[ 8376.416, 968.994, ..., 8925.513, 9108.545]])\n\n >>> # With a higher roll percentage:\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> librosa.feature.spectral_rolloff(y=y, sr=sr, roll_percent=0.95)\n array([[ 10012.939, 3003.882, ..., 10034.473, 10077.539]])\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2, 1, 1)\n >>> plt.semilogy(rolloff.T, label='Roll-off frequency')\n >>> plt.ylabel('Hz')\n >>> plt.xticks([])\n >>> plt.xlim([0, rolloff.shape[-1]])\n >>> plt.legend()\n >>> plt.subplot(2, 1, 2)\n >>> librosa.display.specshow(librosa.amplitude_to_db(S, ref=np.max),\n ... y_axis='log', x_axis='time')\n >>> plt.title('log Power spectrogram')\n >>> plt.tight_layout()\n\n '''\n\n if not 0.0 < roll_percent < 1.0:\n raise ParameterError('roll_percent must lie in the range (0, 1)')\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length)\n\n if not np.isrealobj(S):\n raise ParameterError('Spectral rolloff is only defined '\n 'with real-valued input')\n elif np.any(S < 0):\n raise ParameterError('Spectral rolloff is only defined '\n 'with non-negative energies')\n\n # Compute the center frequencies of each bin\n if freq is None:\n freq = fft_frequencies(sr=sr, n_fft=n_fft)\n\n # Make sure that frequency can be broadcast\n if freq.ndim == 1:\n freq = freq.reshape((-1, 1))\n\n total_energy = np.cumsum(S, axis=0)\n\n threshold = roll_percent * total_energy[-1]\n\n ind = np.where(total_energy < threshold, np.nan, 1)\n\n return np.nanmin(ind * freq, axis=0, keepdims=True)\n\n\ndef spectral_flatness(y=None, S=None, n_fft=2048, hop_length=512,\n amin=1e-10, power=2.0):\n '''Compute spectral flatness\n\n Spectral flatness (or tonality coefficient) is a measure to\n quantify how much noise-like a sound is, as opposed to being\n tone-like [1]_. A high spectral flatness (closer to 1.0)\n indicates the spectrum is similar to white noise.\n It is often converted to decibel.\n\n .. [1] Dubnov, Shlomo \"Generalization of spectral flatness\n measure for non-gaussian linear processes\"\n IEEE Signal Processing Letters, 2004, Vol. 11.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n S : np.ndarray [shape=(d, t)] or None\n (optional) pre-computed spectrogram magnitude\n\n n_fft : int > 0 [scalar]\n FFT window size\n\n hop_length : int > 0 [scalar]\n hop length for STFT. 
See `librosa.core.stft` for details.\n\n amin : float > 0 [scalar]\n minimum threshold for `S` (=added noise floor for numerical stability)\n\n power : float > 0 [scalar]\n Exponent for the magnitude spectrogram.\n e.g., 1 for energy, 2 for power, etc.\n Power spectrogram is usually used for computing spectral flatness.\n\n Returns\n -------\n flatness : np.ndarray [shape=(1, t)]\n spectral flatness for each frame.\n The returned value is in [0, 1] and often converted to dB scale.\n\n\n Examples\n --------\n From time-series input\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> flatness = librosa.feature.spectral_flatness(y=y)\n >>> flatness\n array([[ 1.00000e+00, 5.82299e-03, 5.64624e-04, ..., 9.99063e-01,\n 1.00000e+00, 1.00000e+00]], dtype=float32)\n\n From spectrogram input\n\n >>> S, phase = librosa.magphase(librosa.stft(y))\n >>> librosa.feature.spectral_flatness(S=S)\n array([[ 1.00000e+00, 5.82299e-03, 5.64624e-04, ..., 9.99063e-01,\n 1.00000e+00, 1.00000e+00]], dtype=float32)\n\n From power spectrogram input\n\n >>> S, phase = librosa.magphase(librosa.stft(y))\n >>> S_power = S ** 2\n >>> librosa.feature.spectral_flatness(S=S_power, power=1.0)\n array([[ 1.00000e+00, 5.82299e-03, 5.64624e-04, ..., 9.99063e-01,\n 1.00000e+00, 1.00000e+00]], dtype=float32)\n\n '''\n if amin <= 0:\n raise ParameterError('amin must be strictly positive')\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length,\n power=1.)\n\n if not np.isrealobj(S):\n raise ParameterError('Spectral flatness is only defined '\n 'with real-valued input')\n elif np.any(S < 0):\n raise ParameterError('Spectral flatness is only defined '\n 'with non-negative energies')\n\n S_thresh = np.maximum(amin, S ** power)\n gmean = np.exp(np.mean(np.log(S_thresh), axis=0, keepdims=True))\n amean = np.mean(S_thresh, axis=0, keepdims=True)\n return gmean / amean\n\n\ndef rms(y=None, S=None, frame_length=2048, hop_length=512,\n center=True, pad_mode='reflect'):\n '''Compute root-mean-square (RMS) value for each frame, either from the\n audio samples `y` or from a spectrogram `S`.\n\n Computing the RMS value from audio samples is faster as it doesn't require\n a STFT calculation. However, using a spectrogram will give a more accurate\n representation of energy over time because its frames can be windowed,\n thus prefer using `S` if it's already available.\n\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n (optional) audio time series. Required if `S` is not input.\n\n S : np.ndarray [shape=(d, t)] or None\n (optional) spectrogram magnitude. Required if `y` is not input.\n\n frame_length : int > 0 [scalar]\n length of analysis frame (in samples) for energy calculation\n\n hop_length : int > 0 [scalar]\n hop length for STFT. See `librosa.core.stft` for details.\n\n center : bool\n If `True` and operating on time-domain input (`y`), pad the signal\n by `frame_length//2` on either side.\n\n If operating on spectrogram input, this has no effect.\n\n pad_mode : str\n Padding mode for centered analysis. See `np.pad` for valid\n values.\n\n Returns\n -------\n rms : np.ndarray [shape=(1, t)]\n RMS value for each frame\n\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> librosa.feature.rms(y=y)\n array([[ 0. , 0.056, ..., 0. , 0. 
]], dtype=float32)\n\n Or from spectrogram input\n\n >>> S, phase = librosa.magphase(librosa.stft(y))\n >>> rms = librosa.feature.rms(S=S)\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2, 1, 1)\n >>> plt.semilogy(rms.T, label='RMS Energy')\n >>> plt.xticks([])\n >>> plt.xlim([0, rms.shape[-1]])\n >>> plt.legend(loc='best')\n >>> plt.subplot(2, 1, 2)\n >>> librosa.display.specshow(librosa.amplitude_to_db(S, ref=np.max),\n ... y_axis='log', x_axis='time')\n >>> plt.title('log Power spectrogram')\n >>> plt.tight_layout()\n\n Use a STFT window of constant ones and no frame centering to get consistent\n results with the RMS computed from the audio samples `y`\n\n >>> S = librosa.magphase(librosa.stft(y, window=np.ones, center=False))[0]\n >>> librosa.feature.rms(S=S)\n\n '''\n if y is not None and S is not None:\n raise ValueError('Either `y` or `S` should be input.')\n if y is not None:\n y = to_mono(y)\n if center:\n y = np.pad(y, int(frame_length // 2), mode=pad_mode)\n\n x = util.frame(y,\n frame_length=frame_length,\n hop_length=hop_length)\n elif S is not None:\n x, _ = _spectrogram(y=y, S=S,\n n_fft=frame_length,\n hop_length=hop_length)\n else:\n raise ValueError('Either `y` or `S` must be input.')\n return np.sqrt(np.mean(np.abs(x)**2, axis=0, keepdims=True))\n\n\nrmse = moved('librosa.feature.rmse', '0.6.3', '0.7.0')(rms)\n\n\ndef poly_features(y=None, sr=22050, S=None, n_fft=2048, hop_length=512,\n order=1, freq=None):\n '''Get coefficients of fitting an nth-order polynomial to the columns\n of a spectrogram.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n sr : number > 0 [scalar]\n audio sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)] or None\n (optional) spectrogram magnitude\n\n n_fft : int > 0 [scalar]\n FFT window size\n\n hop_length : int > 0 [scalar]\n hop length for STFT. 
See `librosa.core.stft` for details.\n\n order : int > 0\n order of the polynomial to fit\n\n freq : None or np.ndarray [shape=(d,) or shape=(d, t)]\n Center frequencies for spectrogram bins.\n If `None`, then FFT bin center frequencies are used.\n Otherwise, it can be a single array of `d` center frequencies,\n or a matrix of center frequencies as constructed by\n `librosa.core.ifgram`\n\n Returns\n -------\n coefficients : np.ndarray [shape=(order+1, t)]\n polynomial coefficients for each frame.\n\n `coeffecients[0]` corresponds to the highest degree (`order`),\n\n `coefficients[1]` corresponds to the next highest degree (`order-1`),\n\n down to the constant term `coefficients[order]`.\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> S = np.abs(librosa.stft(y))\n\n Fit a degree-0 polynomial (constant) to each frame\n\n >>> p0 = librosa.feature.poly_features(S=S, order=0)\n\n Fit a linear polynomial to each frame\n\n >>> p1 = librosa.feature.poly_features(S=S, order=1)\n\n Fit a quadratic to each frame\n\n >>> p2 = librosa.feature.poly_features(S=S, order=2)\n\n Plot the results for comparison\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure(figsize=(8, 8))\n >>> ax = plt.subplot(4,1,1)\n >>> plt.plot(p2[2], label='order=2', alpha=0.8)\n >>> plt.plot(p1[1], label='order=1', alpha=0.8)\n >>> plt.plot(p0[0], label='order=0', alpha=0.8)\n >>> plt.xticks([])\n >>> plt.ylabel('Constant')\n >>> plt.legend()\n >>> plt.subplot(4,1,2, sharex=ax)\n >>> plt.plot(p2[1], label='order=2', alpha=0.8)\n >>> plt.plot(p1[0], label='order=1', alpha=0.8)\n >>> plt.xticks([])\n >>> plt.ylabel('Linear')\n >>> plt.subplot(4,1,3, sharex=ax)\n >>> plt.plot(p2[0], label='order=2', alpha=0.8)\n >>> plt.xticks([])\n >>> plt.ylabel('Quadratic')\n >>> plt.subplot(4,1,4, sharex=ax)\n >>> librosa.display.specshow(librosa.amplitude_to_db(S, ref=np.max),\n ... y_axis='log')\n >>> plt.tight_layout()\n '''\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length)\n\n # Compute the center frequencies of each bin\n if freq is None:\n freq = fft_frequencies(sr=sr, n_fft=n_fft)\n\n # If frequencies are constant over frames, then we only need to fit once\n if freq.ndim == 1:\n coefficients = np.polyfit(freq, S, order)\n else:\n # Else, fit each frame independently and stack the results\n coefficients = np.concatenate([[np.polyfit(freq[:, i], S[:, i], order)]\n for i in range(S.shape[1])], axis=0).T\n\n return coefficients\n\n\ndef zero_crossing_rate(y, frame_length=2048, hop_length=512, center=True,\n **kwargs):\n '''Compute the zero-crossing rate of an audio time series.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)]\n Audio time series\n\n frame_length : int > 0\n Length of the frame over which to compute zero crossing rates\n\n hop_length : int > 0\n Number of samples to advance for each frame\n\n center : bool\n If `True`, frames are centered by padding the edges of `y`.\n This is similar to the padding in `librosa.core.stft`,\n but uses edge-value copies instead of reflection.\n\n kwargs : additional keyword arguments\n See `librosa.core.zero_crossings`\n\n .. 
note:: By default, the `pad` parameter is set to `False`, which\n differs from the default specified by\n `librosa.core.zero_crossings`.\n\n Returns\n -------\n zcr : np.ndarray [shape=(1, t)]\n `zcr[0, i]` is the fraction of zero crossings in the\n `i` th frame\n\n See Also\n --------\n librosa.core.zero_crossings\n Compute zero-crossings in a time-series\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> librosa.feature.zero_crossing_rate(y)\n array([[ 0.134, 0.139, ..., 0.387, 0.322]])\n\n '''\n\n util.valid_audio(y)\n\n if center:\n y = np.pad(y, int(frame_length // 2), mode='edge')\n\n y_framed = util.frame(y, frame_length, hop_length)\n\n kwargs['axis'] = 0\n kwargs.setdefault('pad', False)\n\n crossings = zero_crossings(y_framed, **kwargs)\n\n return np.mean(crossings, axis=0, keepdims=True)\n\n\n# -- Chroma --#\ndef chroma_stft(y=None, sr=22050, S=None, norm=np.inf, n_fft=2048,\n hop_length=512, tuning=None, **kwargs):\n \"\"\"Compute a chromagram from a waveform or power spectrogram.\n\n This implementation is derived from `chromagram_E` [1]_\n\n .. [1] Ellis, Daniel P.W. \"Chroma feature analysis and synthesis\"\n 2007/04/21\n http://labrosa.ee.columbia.edu/matlab/chroma-ansyn/\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n sr : number > 0 [scalar]\n sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)] or None\n power spectrogram\n\n norm : float or None\n Column-wise normalization.\n See `librosa.util.normalize` for details.\n\n If `None`, no normalization is performed.\n\n n_fft : int > 0 [scalar]\n FFT window size if provided `y, sr` instead of `S`\n\n hop_length : int > 0 [scalar]\n hop length if provided `y, sr` instead of `S`\n\n tuning : float in `[-0.5, 0.5)` [scalar] or None.\n Deviation from A440 tuning in fractional bins (cents).\n If `None`, it is automatically estimated.\n\n kwargs : additional keyword arguments\n Arguments to parameterize chroma filters.\n See `librosa.filters.chroma` for details.\n\n Returns\n -------\n chromagram : np.ndarray [shape=(n_chroma, t)]\n Normalized energy for each chroma bin at each frame.\n\n See Also\n --------\n librosa.filters.chroma\n Chroma filter bank construction\n librosa.util.normalize\n Vector normalization\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> librosa.feature.chroma_stft(y=y, sr=sr)\n array([[ 0.974, 0.881, ..., 0.925, 1. ],\n [ 1. , 0.841, ..., 0.882, 0.878],\n ...,\n [ 0.658, 0.985, ..., 0.878, 0.764],\n [ 0.969, 0.92 , ..., 0.974, 0.915]])\n\n Use an energy (magnitude) spectrum instead of power spectrogram\n\n >>> S = np.abs(librosa.stft(y))\n >>> chroma = librosa.feature.chroma_stft(S=S, sr=sr)\n >>> chroma\n array([[ 0.884, 0.91 , ..., 0.861, 0.858],\n [ 0.963, 0.785, ..., 0.968, 0.896],\n ...,\n [ 0.871, 1. , ..., 0.928, 0.829],\n [ 1. 
, 0.982, ..., 0.93 , 0.878]])\n\n Use a pre-computed power spectrogram with a larger frame\n\n >>> S = np.abs(librosa.stft(y, n_fft=4096))**2\n >>> chroma = librosa.feature.chroma_stft(S=S, sr=sr)\n >>> chroma\n array([[ 0.685, 0.477, ..., 0.961, 0.986],\n [ 0.674, 0.452, ..., 0.952, 0.926],\n ...,\n [ 0.844, 0.575, ..., 0.934, 0.869],\n [ 0.793, 0.663, ..., 0.964, 0.972]])\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure(figsize=(10, 4))\n >>> librosa.display.specshow(chroma, y_axis='chroma', x_axis='time')\n >>> plt.colorbar()\n >>> plt.title('Chromagram')\n >>> plt.tight_layout()\n\n \"\"\"\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length,\n power=2)\n\n n_chroma = kwargs.get('n_chroma', 12)\n\n if tuning is None:\n tuning = estimate_tuning(S=S, sr=sr, bins_per_octave=n_chroma)\n\n # Get the filter bank\n if 'A440' not in kwargs:\n kwargs['A440'] = 440.0 * 2.0**(float(tuning) / n_chroma)\n\n chromafb = filters.chroma(sr, n_fft, **kwargs)\n\n # Compute raw chroma\n raw_chroma = np.dot(chromafb, S)\n\n # Compute normalization factor for each frame\n return util.normalize(raw_chroma, norm=norm, axis=0)\n\n\ndef chroma_cqt(y=None, sr=22050, C=None, hop_length=512, fmin=None,\n norm=np.inf, threshold=0.0, tuning=None, n_chroma=12,\n n_octaves=7, window=None, bins_per_octave=None, cqt_mode='full'):\n r'''Constant-Q chromagram\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)]\n audio time series\n\n sr : number > 0\n sampling rate of `y`\n\n C : np.ndarray [shape=(d, t)] [Optional]\n a pre-computed constant-Q spectrogram\n\n hop_length : int > 0\n number of samples between successive chroma frames\n\n fmin : float > 0\n minimum frequency to analyze in the CQT.\n Default: 'C1' ~= 32.7 Hz\n\n norm : int > 0, +-np.inf, or None\n Column-wise normalization of the chromagram.\n\n threshold : float\n Pre-normalization energy threshold. Values below the\n threshold are discarded, resulting in a sparse chromagram.\n\n tuning : float\n Deviation (in cents) from A440 tuning\n\n n_chroma : int > 0\n Number of chroma bins to produce\n\n n_octaves : int > 0\n Number of octaves to analyze above `fmin`\n\n window : None or np.ndarray\n Optional window parameter to `filters.cq_to_chroma`\n\n bins_per_octave : int > 0\n Number of bins per octave in the CQT.\n Default: matches `n_chroma`\n\n cqt_mode : ['full', 'hybrid']\n Constant-Q transform mode\n\n Returns\n -------\n chromagram : np.ndarray [shape=(n_chroma, t)]\n The output chromagram\n\n See Also\n --------\n librosa.util.normalize\n librosa.core.cqt\n librosa.core.hybrid_cqt\n chroma_stft\n\n Examples\n --------\n Compare a long-window STFT chromagram to the CQT chromagram\n\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file(),\n ... offset=10, duration=15)\n >>> chroma_stft = librosa.feature.chroma_stft(y=y, sr=sr,\n ... 
n_chroma=12, n_fft=4096)\n >>> chroma_cq = librosa.feature.chroma_cqt(y=y, sr=sr)\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2,1,1)\n >>> librosa.display.specshow(chroma_stft, y_axis='chroma')\n >>> plt.title('chroma_stft')\n >>> plt.colorbar()\n >>> plt.subplot(2,1,2)\n >>> librosa.display.specshow(chroma_cq, y_axis='chroma', x_axis='time')\n >>> plt.title('chroma_cqt')\n >>> plt.colorbar()\n >>> plt.tight_layout()\n\n '''\n\n cqt_func = {'full': cqt, 'hybrid': hybrid_cqt}\n\n if bins_per_octave is None:\n bins_per_octave = n_chroma\n\n # Build the CQT if we don't have one already\n if C is None:\n C = np.abs(cqt_func[cqt_mode](y, sr=sr,\n hop_length=hop_length,\n fmin=fmin,\n n_bins=n_octaves * bins_per_octave,\n bins_per_octave=bins_per_octave,\n tuning=tuning))\n\n # Map to chroma\n cq_to_chr = filters.cq_to_chroma(C.shape[0],\n bins_per_octave=bins_per_octave,\n n_chroma=n_chroma,\n fmin=fmin,\n window=window)\n chroma = cq_to_chr.dot(C)\n\n if threshold is not None:\n chroma[chroma < threshold] = 0.0\n\n # Normalize\n if norm is not None:\n chroma = util.normalize(chroma, norm=norm, axis=0)\n\n return chroma\n\n\ndef chroma_cens(y=None, sr=22050, C=None, hop_length=512, fmin=None,\n tuning=None, n_chroma=12,\n n_octaves=7, bins_per_octave=None, cqt_mode='full', window=None,\n norm=2, win_len_smooth=41, smoothing_window='hann'):\n r'''Computes the chroma variant \"Chroma Energy Normalized\" (CENS), following [1]_.\n\n To compute CENS features, following steps are taken after obtaining chroma vectors using `chroma_cqt`:\n 1. L-1 normalization of each chroma vector\n 2. Quantization of amplitude based on \"log-like\" amplitude thresholds\n 3. (optional) Smoothing with sliding window. Default window length = 41 frames\n 4. (not implemented) Downsampling\n\n CENS features are robust to dynamics, timbre and articulation, thus these are commonly used in audio\n matching and retrieval applications.\n\n .. [1] Meinard Müller and Sebastian Ewert\n \"Chroma Toolbox: MATLAB implementations for extracting variants of chroma-based audio features\"\n In Proceedings of the International Conference on Music Information Retrieval (ISMIR), 2011.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)]\n audio time series\n\n sr : number > 0\n sampling rate of `y`\n\n C : np.ndarray [shape=(d, t)] [Optional]\n a pre-computed constant-Q spectrogram\n\n hop_length : int > 0\n number of samples between successive chroma frames\n\n fmin : float > 0\n minimum frequency to analyze in the CQT.\n Default: 'C1' ~= 32.7 Hz\n\n norm : int > 0, +-np.inf, or None\n Column-wise normalization of the chromagram.\n\n tuning : float\n Deviation (in cents) from A440 tuning\n\n n_chroma : int > 0\n Number of chroma bins to produce\n\n n_octaves : int > 0\n Number of octaves to analyze above `fmin`\n\n window : None or np.ndarray\n Optional window parameter to `filters.cq_to_chroma`\n\n bins_per_octave : int > 0\n Number of bins per octave in the CQT.\n Default: matches `n_chroma`\n\n cqt_mode : ['full', 'hybrid']\n Constant-Q transform mode\n\n win_len_smooth : int > 0 or None\n Length of temporal smoothing window. `None` disables temporal smoothing.\n Default: 41\n\n smoothing_window : str, float or tuple\n Type of window function for temporal smoothing. 
See `filters.get_window` for possible inputs.\n Default: 'hann'\n\n Returns\n -------\n chroma_cens : np.ndarray [shape=(n_chroma, t)]\n The output cens-chromagram\n\n See Also\n --------\n chroma_cqt\n Compute a chromagram from a constant-Q transform.\n\n chroma_stft\n Compute a chromagram from an STFT spectrogram or waveform.\n\n filters.get_window\n Compute a window function.\n\n Examples\n --------\n Compare standard cqt chroma to CENS.\n\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file(),\n ... offset=10, duration=15)\n >>> chroma_cens = librosa.feature.chroma_cens(y=y, sr=sr)\n >>> chroma_cq = librosa.feature.chroma_cqt(y=y, sr=sr)\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2,1,1)\n >>> librosa.display.specshow(chroma_cq, y_axis='chroma')\n >>> plt.title('chroma_cq')\n >>> plt.colorbar()\n >>> plt.subplot(2,1,2)\n >>> librosa.display.specshow(chroma_cens, y_axis='chroma', x_axis='time')\n >>> plt.title('chroma_cens')\n >>> plt.colorbar()\n >>> plt.tight_layout()\n '''\n if not ((win_len_smooth is None) or (isinstance(win_len_smooth, int) and win_len_smooth > 0)):\n raise ParameterError('win_len_smooth={} must be a positive integer or None'.format(win_len_smooth))\n\n chroma = chroma_cqt(y=y, C=C, sr=sr,\n hop_length=hop_length,\n fmin=fmin,\n bins_per_octave=bins_per_octave,\n tuning=tuning,\n norm=None,\n n_chroma=n_chroma,\n n_octaves=n_octaves,\n cqt_mode=cqt_mode,\n window=window)\n\n # L1-Normalization\n chroma = util.normalize(chroma, norm=1, axis=0)\n\n # Quantize amplitudes\n QUANT_STEPS = [0.4, 0.2, 0.1, 0.05]\n QUANT_WEIGHTS = [0.25, 0.25, 0.25, 0.25]\n\n chroma_quant = np.zeros_like(chroma)\n\n for cur_quant_step_idx, cur_quant_step in enumerate(QUANT_STEPS):\n chroma_quant += (chroma > cur_quant_step) * QUANT_WEIGHTS[cur_quant_step_idx]\n\n if win_len_smooth:\n # Apply temporal smoothing\n win = filters.get_window(smoothing_window, win_len_smooth + 2, fftbins=False)\n win /= np.sum(win)\n win = np.atleast_2d(win)\n\n cens = scipy.signal.convolve2d(chroma_quant, win,\n mode='same', boundary='fill')\n else:\n cens = chroma_quant\n\n # L2-Normalization\n return util.normalize(cens, norm=norm, axis=0)\n\n\ndef tonnetz(y=None, sr=22050, chroma=None):\n '''Computes the tonal centroid features (tonnetz), following the method of\n [1]_.\n\n .. [1] Harte, C., Sandler, M., & Gasser, M. (2006). \"Detecting Harmonic\n Change in Musical Audio.\" In Proceedings of the 1st ACM Workshop\n on Audio and Music Computing Multimedia (pp. 21-26).\n Santa Barbara, CA, USA: ACM Press. 
doi:10.1145/1178723.1178727.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n Audio time series.\n\n sr : number > 0 [scalar]\n sampling rate of `y`\n\n chroma : np.ndarray [shape=(n_chroma, t)] or None\n Normalized energy for each chroma bin at each frame.\n\n If `None`, a cqt chromagram is performed.\n\n Returns\n -------\n tonnetz : np.ndarray [shape(6, t)]\n Tonal centroid features for each frame.\n\n Tonnetz dimensions:\n - 0: Fifth x-axis\n - 1: Fifth y-axis\n - 2: Minor x-axis\n - 3: Minor y-axis\n - 4: Major x-axis\n - 5: Major y-axis\n\n See Also\n --------\n chroma_cqt\n Compute a chromagram from a constant-Q transform.\n\n chroma_stft\n Compute a chromagram from an STFT spectrogram or waveform.\n\n Examples\n --------\n Compute tonnetz features from the harmonic component of a song\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> y = librosa.effects.harmonic(y)\n >>> tonnetz = librosa.feature.tonnetz(y=y, sr=sr)\n >>> tonnetz\n array([[-0.073, -0.053, ..., -0.054, -0.073],\n [ 0.001, 0.001, ..., -0.054, -0.062],\n ...,\n [ 0.039, 0.034, ..., 0.044, 0.064],\n [ 0.005, 0.002, ..., 0.011, 0.017]])\n\n Compare the tonnetz features to `chroma_cqt`\n\n >>> import matplotlib.pyplot as plt\n >>> plt.subplot(2, 1, 1)\n >>> librosa.display.specshow(tonnetz, y_axis='tonnetz')\n >>> plt.colorbar()\n >>> plt.title('Tonal Centroids (Tonnetz)')\n >>> plt.subplot(2, 1, 2)\n >>> librosa.display.specshow(librosa.feature.chroma_cqt(y, sr=sr),\n ... y_axis='chroma', x_axis='time')\n >>> plt.colorbar()\n >>> plt.title('Chroma')\n >>> plt.tight_layout()\n\n '''\n\n if y is None and chroma is None:\n raise ParameterError('Either the audio samples or the chromagram must be '\n 'passed as an argument.')\n\n if chroma is None:\n chroma = chroma_cqt(y=y, sr=sr)\n\n # Generate Transformation matrix\n dim_map = np.linspace(0, 12, num=chroma.shape[0], endpoint=False)\n\n scale = np.asarray([7. / 6, 7. / 6,\n 3. / 2, 3. / 2,\n 2. / 3, 2. 
/ 3])\n\n V = np.multiply.outer(scale, dim_map)\n\n # Even rows compute sin()\n V[::2] -= 0.5\n\n R = np.array([1, 1, # Fifths\n 1, 1, # Minor\n 0.5, 0.5]) # Major\n\n phi = R[:, np.newaxis] * np.cos(np.pi * V)\n\n # Do the transform to tonnetz\n return phi.dot(util.normalize(chroma, norm=1, axis=0))\n\n\n# -- Mel spectrogram and MFCCs -- #\ndef mfcc(y=None, sr=22050, S=None, n_mfcc=20, dct_type=2, norm='ortho', **kwargs):\n \"\"\"Mel-frequency cepstral coefficients (MFCCs)\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time series\n\n sr : number > 0 [scalar]\n sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)] or None\n log-power Mel spectrogram\n\n n_mfcc: int > 0 [scalar]\n number of MFCCs to return\n\n dct_type : None, or {1, 2, 3}\n Discrete cosine transform (DCT) type.\n By default, DCT type-2 is used.\n\n norm : None or 'ortho'\n If `dct_type` is `2 or 3`, setting `norm='ortho'` uses an ortho-normal\n DCT basis.\n\n Normalization is not supported for `dct_type=1`.\n\n kwargs : additional keyword arguments\n Arguments to `melspectrogram`, if operating\n on time series input\n\n Returns\n -------\n M : np.ndarray [shape=(n_mfcc, t)]\n MFCC sequence\n\n See Also\n --------\n melspectrogram\n scipy.fftpack.dct\n\n Examples\n --------\n Generate mfccs from a time series\n\n >>> y, sr = librosa.load(librosa.util.example_audio_file(), offset=30, duration=5)\n >>> librosa.feature.mfcc(y=y, sr=sr)\n array([[ -5.229e+02, -4.944e+02, ..., -5.229e+02, -5.229e+02],\n [ 7.105e-15, 3.787e+01, ..., -7.105e-15, -7.105e-15],\n ...,\n [ 1.066e-14, -7.500e+00, ..., 1.421e-14, 1.421e-14],\n [ 3.109e-14, -5.058e+00, ..., 2.931e-14, 2.931e-14]])\n\n Use a pre-computed log-power Mel spectrogram\n\n >>> S = librosa.feature.melspectrogram(y=y, sr=sr, n_mels=128,\n ... fmax=8000)\n >>> librosa.feature.mfcc(S=librosa.power_to_db(S))\n array([[ -5.207e+02, -4.898e+02, ..., -5.207e+02, -5.207e+02],\n [ -2.576e-14, 4.054e+01, ..., -3.997e-14, -3.997e-14],\n ...,\n [ 7.105e-15, -3.534e+00, ..., 0.000e+00, 0.000e+00],\n [ 3.020e-14, -2.613e+00, ..., 3.553e-14, 3.553e-14]])\n\n Get more components\n\n >>> mfccs = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=40)\n\n Visualize the MFCC series\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure(figsize=(10, 4))\n >>> librosa.display.specshow(mfccs, x_axis='time')\n >>> plt.colorbar()\n >>> plt.title('MFCC')\n >>> plt.tight_layout()\n\n Compare different DCT bases\n\n >>> m_slaney = librosa.feature.mfcc(y=y, sr=sr, dct_type=2)\n >>> m_htk = librosa.feature.mfcc(y=y, sr=sr, dct_type=3)\n >>> plt.figure(figsize=(10, 6))\n >>> plt.subplot(2, 1, 1)\n >>> librosa.display.specshow(m_slaney, x_axis='time')\n >>> plt.title('RASTAMAT / Auditory toolbox (dct_type=2)')\n >>> plt.colorbar()\n >>> plt.subplot(2, 1, 2)\n >>> librosa.display.specshow(m_htk, x_axis='time')\n >>> plt.title('HTK-style (dct_type=3)')\n >>> plt.colorbar()\n >>> plt.tight_layout()\n \"\"\"\n\n if S is None:\n S = power_to_db(melspectrogram(y=y, sr=sr, **kwargs))\n\n return scipy.fftpack.dct(S, axis=0, type=dct_type, norm=norm)[:n_mfcc]\n\n\ndef melspectrogram(y=None, sr=22050, S=None, n_fft=2048, hop_length=512,\n power=2.0, **kwargs):\n \"\"\"Compute a mel-scaled spectrogram.\n\n If a spectrogram input `S` is provided, then it is mapped directly onto\n the mel basis `mel_f` by `mel_f.dot(S)`.\n\n If a time-series input `y, sr` is provided, then its magnitude spectrogram\n `S` is first computed, and then mapped onto the mel scale by\n `mel_f.dot(S**power)`. 
By default, `power=2` operates on a power spectrum.\n\n Parameters\n ----------\n y : np.ndarray [shape=(n,)] or None\n audio time-series\n\n sr : number > 0 [scalar]\n sampling rate of `y`\n\n S : np.ndarray [shape=(d, t)]\n spectrogram\n\n n_fft : int > 0 [scalar]\n length of the FFT window\n\n hop_length : int > 0 [scalar]\n number of samples between successive frames.\n See `librosa.core.stft`\n\n power : float > 0 [scalar]\n Exponent for the magnitude melspectrogram.\n e.g., 1 for energy, 2 for power, etc.\n\n kwargs : additional keyword arguments\n Mel filter bank parameters.\n See `librosa.filters.mel` for details.\n\n Returns\n -------\n S : np.ndarray [shape=(n_mels, t)]\n Mel spectrogram\n\n See Also\n --------\n librosa.filters.mel\n Mel filter bank construction\n\n librosa.core.stft\n Short-time Fourier Transform\n\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file())\n >>> librosa.feature.melspectrogram(y=y, sr=sr)\n array([[ 2.891e-07, 2.548e-03, ..., 8.116e-09, 5.633e-09],\n [ 1.986e-07, 1.162e-02, ..., 9.332e-08, 6.716e-09],\n ...,\n [ 3.668e-09, 2.029e-08, ..., 3.208e-09, 2.864e-09],\n [ 2.561e-10, 2.096e-09, ..., 7.543e-10, 6.101e-10]])\n\n Using a pre-computed power spectrogram\n\n >>> D = np.abs(librosa.stft(y))**2\n >>> S = librosa.feature.melspectrogram(S=D)\n\n >>> # Passing through arguments to the Mel filters\n >>> S = librosa.feature.melspectrogram(y=y, sr=sr, n_mels=128,\n ... fmax=8000)\n\n >>> import matplotlib.pyplot as plt\n >>> plt.figure(figsize=(10, 4))\n >>> librosa.display.specshow(librosa.power_to_db(S,\n ... ref=np.max),\n ... y_axis='mel', fmax=8000,\n ... x_axis='time')\n >>> plt.colorbar(format='%+2.0f dB')\n >>> plt.title('Mel spectrogram')\n >>> plt.tight_layout()\n\n\n \"\"\"\n\n S, n_fft = _spectrogram(y=y, S=S, n_fft=n_fft, hop_length=hop_length,\n power=power)\n\n # Build a Mel filter\n mel_basis = filters.mel(sr, n_fft, **kwargs)\n\n return np.dot(mel_basis, S)\n" ]
[ [ "numpy.dot", "numpy.subtract.outer", "numpy.mean", "numpy.where", "numpy.sort", "numpy.cos", "numpy.cumsum", "numpy.zeros_like", "numpy.log", "numpy.nanmin", "numpy.logical_and", "numpy.arange", "numpy.multiply.outer", "numpy.polyfit", "numpy.flatnonzero", "numpy.atleast_2d", "numpy.array", "numpy.zeros", "scipy.fftpack.dct", "numpy.asarray", "numpy.sum", "numpy.any", "numpy.atleast_1d", "numpy.isrealobj", "numpy.abs", "numpy.linspace", "scipy.signal.convolve2d", "numpy.maximum" ] ]
colineRamee/UTM_simulator
[ "fe0cb59b1a3a64f279fbc12f90c33455a2522d50" ]
[ "scripts/example_analysis.py" ]
[ "import os\nimport numpy as np\nimport json\nimport matplotlib.pyplot as plt\nimport matplotlib as mpl\nfrom scripts.analyses import efficiency, severity_per_valid_los, los_per_agent, los_per_agent_flight_hour, nmac_per_agent_flight_hour, average_throughput, ground_delay, time_to_think, coline_boxplot\nimport pickle\n\n# Load results\n# This can take a minute so the results are pickled, and this section can be commented when working on the plots\n\nresults = {}\nmy_path = '../logs/example/'\nfor filename in os.listdir(my_path):\n with open(my_path + filename) as f:\n data = json.load(f)\n t0 = data[\"times\"][\"time_all_started_after_t_density\"]\n algo_type = data[\"inputs\"][\"algorithm_type\"]\n n_intruders = data[\"inputs\"][\"n_intruders\"]\n sim_type = data[\"inputs\"][\"simulation_type\"]\n if algo_type == 'straight':\n sim_type = 'baseline'\n experiment = results.setdefault(sim_type, {}).setdefault(algo_type, {}).setdefault(n_intruders, {})\n t_efficiency, e_efficiency, t = efficiency(data, t_0_measure=t0)\n experiment.setdefault('time_efficiency', []).append(t_efficiency)\n experiment.setdefault('energy_efficiency', []).append(e_efficiency)\n hip = severity_per_valid_los(data, t_0_measure=t0, desired_separation_distance=data[\"inputs\"][\"h_collision_dist_m\"])\n experiment.setdefault('hip', []).append(hip)\n n_los = los_per_agent(data, t_0_measure=t0)\n experiment.setdefault('los_per_agent', []).append(n_los)\n n_los_per_flight_hour = los_per_agent_flight_hour(data, t_0_measure=t0)\n experiment.setdefault('los_per_agent_flight_hour', []).append(n_los_per_flight_hour)\n n_nmac_per_agent_flight_hour = nmac_per_agent_flight_hour(data, t_0_measure=t0, nmac=152.4)\n experiment.setdefault('nmac_per_agent_flight_hour', []).append(n_nmac_per_agent_flight_hour)\n q = average_throughput(data, t_0_measure=t0)\n experiment.setdefault('throughput', []).append(q)\n ground_delays = ground_delay(data, t_0_measure=t0)\n experiment.setdefault('ground_delays', []).append(ground_delays)\n compute_time = time_to_think(data, t_0_measure=t0)\n experiment.setdefault('compute_time', []).append(compute_time)\n\npickle.dump(results, open(\"saved_results.p\", \"wb\"))\n\n# You only need to run the above section once\n\nresults = pickle.load(open(\"saved_results.p\", \"rb\"))\n\n# Create plots\nmpl.rcParams['font.size'] = 12\nmpl.rcParams['font.family'] = 'serif'\nmpl.rcParams['font.serif'] = 'Times New Roman'\n\n# Throughput\nfig0, ax0 = plt.subplots(1, 1, figsize=(6, 4))\nplt.tight_layout()\nfig0.subplots_adjust(top=0.75)\n\n# Efficiencies\nfig1, ax1 = plt.subplots(1, 2, figsize=(6, 4))\nfig1.subplots_adjust(top=0.75, wspace=0.3)\n\n# Throughput distribution across 10 runs\nfig_q_distrib, ax_q_distrib = plt.subplots(1, 1, figsize=(6, 4))\nax_q_distrib.set_ylabel('Q (agents/min)')\nax_q_distrib.set_xlabel('N')\n\n# Efficiencies\nfig2, ax2 = plt.subplots(2, 1, figsize=(6, 5))\nfig2.subplots_adjust(top=0.83, hspace=0.05, right=0.99, left=0.11)\n\n# Safety (LoS/h and average HIP)\nfig3, ax3 = plt.subplots(2, 1, figsize=(6, 4))\nfig3.subplots_adjust(top=0.85, hspace=0.2)\n\n# Safety (HIP distribution and NMAC/h)\nfig4, ax4 = plt.subplots(2, 1, figsize=(6, 4))\nfig4.subplots_adjust(top=0.85, hspace=0.2)\n\n# Runtime\nfig5, ax5 = plt.subplots(1, 1, figsize=(6, 3))\nplt.tight_layout()\n\noffset = 0\ni = -1\nj = -1\nn_categories = 6\nfor simulation_type in results:\n if simulation_type == 'reactive':\n access = 'Free-access, '\n elif simulation_type == 'strategic':\n access = '4DT contract, '\n else:\n access = 
''\n for algo_type in results[simulation_type]:\n if algo_type == 'MVP_Bluesky':\n label = access + 'MVP'\n elif algo_type == 'LocalVO':\n label = access + 'Local VO'\n elif algo_type == 'straight':\n label = 'Baseline, straight'\n else:\n label = access + algo_type\n\n i += 1\n n_intruders = []\n q = []\n eta_e = []\n eta_t = []\n los = []\n hip = []\n los_per_hour = []\n nmac_per_hour = []\n throughput = []\n energy_efficiency = []\n time_efficiency = []\n loss_of_separation = []\n horizontal_intrusion_parameter = []\n compute_time = []\n for n in sorted(results[simulation_type][algo_type]):\n runs = results[simulation_type][algo_type][n]\n n_intruders.append(n)\n q.append(sum(runs['throughput']) / len(runs['throughput']))\n los_per_hour.append(sum(runs['los_per_agent_flight_hour']) / len(runs['los_per_agent_flight_hour']))\n nmac_per_hour.append(sum(runs['nmac_per_agent_flight_hour']) / len(runs['nmac_per_agent_flight_hour']))\n throughput.append(runs['throughput'])\n eff = list(np.concatenate(runs['energy_efficiency']))\n eta_e.append(np.mean(eff))\n energy_efficiency.append(eff)\n eff = list(np.concatenate(runs['time_efficiency']))\n eta_t.append(np.mean(eff))\n time_efficiency.append(eff)\n lossOfSeparation = list(np.concatenate(runs['los_per_agent']))\n los.append(np.mean(lossOfSeparation))\n loss_of_separation.append(lossOfSeparation)\n intrusion = list(np.concatenate(runs['hip']))\n hip.append(np.mean(intrusion))\n horizontal_intrusion_parameter.append(intrusion)\n think = list(np.concatenate(runs['compute_time']))\n compute_time.append(np.mean(think))\n\n p = ax0.plot(n_intruders, q, label=label)\n color = p[-1].get_color()\n ax1[0].plot(n_intruders, eta_e, label=label)\n ax1[1].plot(n_intruders, eta_t, label=label)\n # ax1bis[0].plot(n_intruders, los, label=label)\n # ax1bis[1].plot(n_intruders, hip, label=label)\n if algo_type != 'straight':\n ax5.plot(n_intruders, compute_time, label=label)\n if simulation_type != 'strategic':\n j += 1\n if algo_type == 'ORCA':\n ax3[0].plot(n_intruders[1:], los_per_hour[1:], label=label, color=color)\n else:\n ax3[0].plot(n_intruders, los_per_hour, label=label, color=color)\n ax3[1].plot(n_intruders, hip, label=label, color=color)\n ax4[1].plot(n_intruders, nmac_per_hour, label=label, color=color, marker='o', linestyle='None', fillstyle='none', mew=2)\n coline_boxplot(ax4[0], horizontal_intrusion_parameter, n_intruders, j, 3, color, label=label)\n\n coline_boxplot(ax_q_distrib, throughput, n_intruders, i, n_categories, color, label=label)\n if algo_type != 'straight':\n coline_boxplot(ax2[0], energy_efficiency, n_intruders, i + offset, n_categories - 1, color, label=label)\n coline_boxplot(ax2[1], time_efficiency, n_intruders, i + offset, n_categories - 1, color, label=label)\n else:\n offset = -1\n\n# Throughput\nax0.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left', ncol=2, borderaxespad=0.)\nax0.set_ylim(bottom=0)\nax0.set_xlim(left=0)\nax0.set_ylabel('Q (agents/min)')\nax0.set_xlabel('N')\nax0.grid()\n\nax1[0].legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left', ncol=2, borderaxespad=0.)\nymin, ymax = ax1[1].get_ylim()\nax1[0].set_ylim(ymin, ymax)\nax1[0].set_ylabel(r\"$\\eta_{energy}$\")\nax1[0].set_xlabel('N')\nax1[0].set_xlim(left=0)\nax1[0].grid()\nax1[1].set_ylabel(r\"$\\eta_{time}$\")\nax1[1].set_xlabel('N')\nax1[1].set_xlim(left=0)\nax1[1].grid()\n\nax2[0].legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left', ncol=2, borderaxespad=0.)\nax2[0].set_ylabel(r\"$\\eta_{energy}$\")\nax2[0].set_ylim(bottom=0, 
top=1.1)\nax2[0].grid()\nax2[1].set_ylabel(r\"$\\eta_{time}$\")\nax2[1].set_xlabel('N')\nax2[1].set_ylim(bottom=0, top=1.1)\nax2[1].grid()\n\nax3[0].legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left', ncol=2, borderaxespad=0.)\nax3[0].grid()\nax3[0].set_ylabel(r\"$n_{LOS}/h$\")\nax3[0].set_yscale('log')\nax3[0].set_yticks([0.1, 0.3, 1, 3, 10, 30, 100])\nax3[0].get_yaxis().set_major_formatter(mpl.ticker.ScalarFormatter())\nax3[1].set_ylabel('HIP')\nax3[1].set_xlabel('N')\nax3[1].grid()\n\nax4[0].legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left', ncol=2, borderaxespad=0.)\nax4[0].grid()\nax4[1].set_ylabel(r\"$n_{NMAC}/h$\")\nax4[0].set_ylabel('HIP')\nax4[1].set_xlabel('N')\nax4[1].grid()\nax4[1].set_yscale('log')\nax4[1].set_yticks([0.001, 0.01, 0.1, 1, 10, 100])\n\nax5.set_ylim(bottom=0)\nax5.set_xlim(left=0)\nax5.set_xlabel('N')\nax5.set_ylabel('Planning time (s)')\nax5.grid()\nax5.legend()\nplt.tight_layout()\n\nplt.show()\n" ]
[ [ "numpy.concatenate", "matplotlib.ticker.ScalarFormatter", "matplotlib.pyplot.subplots", "numpy.mean", "matplotlib.pyplot.tight_layout", "matplotlib.pyplot.show" ] ]
awentzonline/tartangan
[ "2d36a81fa0ae91fe6b9b4e1f26763285630837fb" ]
[ "tartangan/models/losses.py" ]
[ "import torch\nimport torch.nn.functional as F\n\n\n# Found these hinge loss functions in this BigGAN repo:\n# https://github.com/ajbrock/BigGAN-PyTorch\ndef discriminator_hinge_loss(real, fake):\n loss_real = torch.mean(F.relu(1. - real))\n loss_fake = torch.mean(F.relu(1. + fake))\n return loss_real, loss_fake\n\n\ndef generator_hinge_loss(fake):\n return -torch.mean(fake)\n\n\ndef gradient_penalty(preds, data):\n \"\"\"\n https://arxiv.org/pdf/1801.04406.pdf\n https://github.com/LMescheder/GAN_stability/blob/master/gan_training/train.py\n \"\"\"\n batch_size = data.size(0)\n grad_dout = torch.autograd.grad(\n outputs=preds.sum(), inputs=data,\n create_graph=True, retain_graph=True, only_inputs=True\n )[0]\n grad_dout2 = grad_dout.pow(2)\n assert(grad_dout2.size() == data.size())\n reg = grad_dout2.view(batch_size, -1).sum(1).mean()\n return reg\n" ]
[ [ "torch.nn.functional.relu", "torch.mean" ] ]
WISDEM/WOMBAT
[ "260611a5f2d78f9f6e96865be34f5248acf07079" ]
[ "wombat/windfarm/windfarm.py" ]
[ "\"\"\"Creates the Windfarm class/model.\"\"\"\n\nimport os # type: ignore\nimport numpy as np\nimport pandas as pd # type: ignore\nimport logging # type: ignore\nimport networkx as nx # type: ignore\nfrom math import fsum\nfrom geopy import distance # type: ignore\nfrom itertools import chain, combinations\n\nfrom wombat.core import RepairManager, WombatEnvironment\nfrom wombat.core.library import load_yaml\nfrom wombat.windfarm.system import Cable, System\n\n\nclass Windfarm:\n \"\"\"The primary class for operating on objects within a windfarm. The substations,\n cables, and turbines are created as a network object to be more appropriately accessed\n and controlled.\n \"\"\"\n\n def __init__(\n self,\n env: WombatEnvironment,\n windfarm_layout: str,\n repair_manager: RepairManager,\n ) -> None:\n\n self.env = env\n self.repair_manager = repair_manager\n\n # self._logging_setup()\n\n self._create_graph_layout(windfarm_layout)\n self._create_turbines_and_substations()\n self._create_cables()\n self.capacity = sum(self.node_system(turb).capacity for turb in self.turbine_id)\n self._create_substation_turbine_map()\n self.calculate_distance_matrix()\n\n self.repair_manager._register_windfarm(self)\n\n self.env.process(self._log_operations())\n\n def _logging_setup(self) -> None:\n \"\"\"Completes the setup for logging data.\n\n Parameters\n ----------\n which : str\n One of \"events\" or \"operations\". For creating event logs or operational\n status logs.\n \"\"\"\n\n logging.basicConfig(\n format=\"%(asctime)s :: %(name)s :: %(levelname)s :: %(message)s\",\n filename=self.env.operations_log_fname,\n filemode=\"w\",\n level=logging.DEBUG,\n )\n self._operations_logger = logging.getLogger(__name__)\n\n def _create_graph_layout(self, windfarm_layout: str) -> None:\n \"\"\"Creates a network layout of the windfarm start from the substation(s) to\n be able to capture downstream turbines that can be cut off in the event of a cable failure.\n\n Parameters\n ----------\n windfarm_layout : str\n Filename to use for reading in the windfarm layout; must be a csv file.\n \"\"\"\n layout_path = str(self.env.data_dir / \"windfarm\" / windfarm_layout)\n layout = (\n pd.read_csv(layout_path)\n .sort_values(by=[\"string\", \"order\"])\n .reset_index(drop=True)\n )\n layout.subassembly = layout.subassembly.fillna(\"\")\n\n windfarm = nx.DiGraph()\n windfarm.add_nodes_from(layout.id.values)\n\n # Assign the data attributes to the graph nodes\n for col in (\"name\", \"latitude\", \"longitude\", \"subassembly\"):\n d = {i: n for i, n in layout[[\"id\", col]].values}\n nx.set_node_attributes(windfarm, d, name=col)\n\n # Determine which nodes are substations and which are turbines\n substation_filter = layout.id == layout.substation_id\n self.substation_id = layout[substation_filter].id.unique()\n self.turbine_id = layout[~substation_filter].id.unique()\n _type = {True: \"substation\", False: \"turbine\"}\n d = {i: _type[val] for i, val in zip(layout.id, substation_filter.values)}\n nx.set_node_attributes(windfarm, d, name=\"type\")\n\n substations = layout[substation_filter].copy()\n turbines = layout[~substation_filter].copy()\n substation_sections = [\n turbines[turbines.substation_id == substation]\n for substation in substations.id\n ]\n for section in substation_sections:\n for _, row in section.iterrows():\n if row.order == 0:\n start: str = row.substation_id\n else:\n start = current # noqa: F821\n current: str = row.id\n windfarm.add_edge(\n start, current, length=row.distance, 
cable=row.upstream_cable\n )\n\n self.graph = windfarm\n\n def _create_turbines_and_substations(self) -> None:\n for system_id, data in self.graph.nodes(data=True):\n if data[\"subassembly\"] == \"\":\n raise ValueError(\n \"A 'subassembly' file must be specified for all nodes in the windfarm layout!\"\n )\n\n subassembly_dict = load_yaml(\n self.env.data_dir / \"windfarm\", data[\"subassembly\"]\n )\n self.graph.nodes[system_id][\"system\"] = System(\n self.env,\n self.repair_manager,\n system_id,\n data[\"name\"],\n subassembly_dict,\n data[\"type\"],\n )\n\n def _create_cables(self) -> None:\n for start_node, end_node, data in self.graph.edges(data=True):\n\n start_coordinates = (\n self.graph.nodes[start_node][\"latitude\"],\n self.graph.nodes[start_node][\"longitude\"],\n )\n end_coordinates = (\n self.graph.nodes[end_node][\"latitude\"],\n self.graph.nodes[end_node][\"longitude\"],\n )\n\n # If the real distance/cable length is not input, then the geodesic distance\n # is calculated\n if data[\"length\"] == 0:\n data[\"length\"] = distance.geodesic(\n start_coordinates, end_coordinates, ellipsoid=\"WGS-84\"\n ).km\n\n upstream_turbines = nx.dfs_successors(self.graph, end_node)\n cable_dict = load_yaml(\n os.path.join(self.env.data_dir, \"windfarm\"), data[\"cable\"]\n )\n data[\"cable\"] = Cable(\n self,\n self.env,\n f\"cable::{start_node}::{end_node}\",\n start_node,\n upstream_turbines,\n cable_dict,\n )\n\n # Calaculate the geometric center point\n end_points = np.array((start_coordinates, end_coordinates))\n data[\"latitude\"], data[\"longitude\"] = end_points.mean(axis=0)\n\n def calculate_distance_matrix(self) -> None:\n \"\"\"Calculates hte geodesic distance, in km, between all of the windfarm's nodes, e.g.,\n substations and turbines, and cables.\n \"\"\"\n ids = list(self.graph.nodes())\n ids.extend([data[\"cable\"].id for *_, data in self.graph.edges(data=True)])\n coords = [\n (data[\"latitude\"], data[\"longitude\"])\n for *_, data in (*self.graph.nodes(data=True), *self.graph.edges(data=True))\n ]\n\n dist = [distance.geodesic(c1, c2).km for c1, c2 in combinations(coords, 2)]\n dist_arr = np.ones((len(ids), len(ids)))\n triangle_ix = np.triu_indices_from(dist_arr, 1)\n dist_arr[triangle_ix] = dist_arr.T[triangle_ix] = dist\n\n # Set the self distance to infinity, so that only one crew can be dropped off\n # at a single point\n np.fill_diagonal(dist_arr, np.inf)\n\n self.distance_matrix = pd.DataFrame(dist_arr, index=ids, columns=ids)\n\n def _create_substation_turbine_map(self) -> None:\n \"\"\"Creates ``substation_turbine_map``, a dictionary, that maps substation(s) to\n the dependent turbines in the windfarm, and the weighting of each turbine in the\n windfarm.\n \"\"\"\n # Get all turbines dependent on each substation\n s_t_map = {\n s_id: list(\n chain.from_iterable(nx.dfs_successors(self.graph, source=s_id).values())\n )\n for s_id in self.substation_id\n }\n\n # Reorient the mapping to have the turbine list and the capacity-based weighting\n # of each turbine\n s_t_map = {\n s_id: dict( # type: ignore\n turbines=np.array(s_t_map[s_id]),\n weights=np.array([self.node_system(t).capacity for t in s_t_map[s_id]])\n / self.capacity,\n )\n for s_id in s_t_map\n }\n self.substation_turbine_map = s_t_map\n\n def _log_operations(self):\n \"\"\"Logs the operational data for a simulation.\"\"\"\n system_list = list(self.graph.nodes)\n columns = [\n \"datetime\",\n \"name\",\n \"level\",\n \"env_datetime\",\n \"env_time\",\n ] + system_list\n 
self.env._operations_logger.info(\" :: \".join(columns))\n\n message = [self.env.simulation_time, self.env.now]\n message.extend(\n [self.node_system(system).operating_level for system in system_list]\n )\n message = \" :: \".join((f\"{m}\" for m in message)) # type: ignore\n self.env._operations_logger.info(message)\n\n HOURS = 1\n while True:\n yield self.env.timeout(HOURS)\n message = [self.env.simulation_time, self.env.now] + [\n self.node_system(system).operating_level for system in system_list\n ]\n message = \" :: \".join(f\"{m}\" for m in message) # type: ignore\n self.env._operations_logger.info(message)\n\n def node_system(self, system_id: str) -> System:\n \"\"\"Returns the desired node in the windfarm.\n\n Parameters\n ----------\n system_id : str\n The system's unique identifier, ``wombat.windfarm.System.id``.\n\n Returns\n -------\n System\n The ``System`` object.\n \"\"\"\n return self.graph.nodes[system_id][\"system\"]\n\n @property\n def current_availability(self) -> float:\n \"\"\"Calculates the product of all system ``operating_level`` variables across the\n windfarm using the following forumation\n\n .. math::\n \\sum{\n OperatingLevel_{substation_{i}} *\n \\sum{OperatingLevel_{turbine_{j}} * Weight_{turbine_{j}}}\n }\n\n where the :math:``{OperatingLevel}`` is the product of the operating level\n of each subassembly on a given system (substation or turbine), and the\n :math:``{Weight}`` is the proportion of one turbine's capacity relative to\n the whole windfarm.\n \"\"\" # noqa: W605\n operating_levels = {\n s_id: [\n self.node_system(t).operating_level\n for t in self.substation_turbine_map[s_id][\"turbines\"] # type: ignore\n ]\n for s_id in self.substation_turbine_map\n }\n availability = fsum(\n [\n self.node_system(s_id).operating_level\n * fsum(\n operating_levels[s_id]\n * self.substation_turbine_map[s_id][\"weights\"] # type: ignore\n )\n for s_id in self.substation_turbine_map\n ]\n )\n return availability\n" ]
[ [ "numpy.array", "numpy.fill_diagonal", "numpy.triu_indices_from", "pandas.DataFrame", "pandas.read_csv" ] ]
tubs-eis/VANAGA
[ "4a4084b97720b2820cd20c1f4c42263360921f2f" ]
[ "pareto_selection_nano.py" ]
[ "import numpy as np\nimport config as cf\nfrom crowding_nano import reduce_by_crowding\n\n\ndef identify_pareto(scores, population_ids):\n \"\"\"Identifies a single Pareto front, and returns the\n population IDs of the selected solutions\"\"\"\n\n population_size = scores.shape[0]\n # Create a starting list of items on the Pareto front\n # All items start off as being labelled as on the Pareto front\n pareto_front = np.ones(population_size, dtype=bool)\n if cf.tracked_fitness == 'min_metric':\n # Loop through each item. This will then be compared with all other items\n for i in range(population_size):\n # Loop though all other items\n for j in range(population_size):\n # Check if our 'j' point is dominated by out 'i' point\n if all(scores[j] <= scores[i]):\n if any(scores[j] < scores[i]):\n # i dominates j. Label 'j' point as not on the Pareto front\n pareto_front[i] = 0\n # Stop further comparisons with 'i' (no more comparisons needed)\n break\n\n elif cf.tracked_fitness == 'max_metric':\n # Loop through each item. This will then be compared with all other items\n for i in range(population_size):\n # Loop though all other items\n for j in range(population_size):\n # Check if our 'i' point is dominated by out 'j' point\n if all(scores[j] >= scores[i]):\n if any(scores[j] > scores[i]):\n # j dominates i. Label 'i' point as not on the Pareto front\n pareto_front[i] = 0\n # Stop further comparisons with 'i' (no more comparisons needed)\n break\n\n # Returns ids of scenarios on pareto front\n return population_ids[pareto_front]\n\n\ndef build_pareto_population(population, scores, min_population_size, max_population_size):\n \"\"\"As necessary repeats Pareto front selection to build a population\n within defined size limits. It will reduce a Pareto front by applying\n crowding selection as necessary\"\"\"\n unselected_population_ids = np.arange(population.shape[0])\n all_population_ids = np.arange(population.shape[0])\n pareto_front = []\n while len(pareto_front) < min_population_size:\n temp_pareto_front = identify_pareto(scores[unselected_population_ids, :], unselected_population_ids)\n # Check size of total Pareto front\n # If larger than max size reduce new Pareto front by crowding\n combined_pareto_size = len(pareto_front) + len(temp_pareto_front)\n if combined_pareto_size > max_population_size:\n number_to_select = len(temp_pareto_front) - (combined_pareto_size - max_population_size)\n selected_individuals = reduce_by_crowding(scores[temp_pareto_front], number_to_select)\n temp_pareto_front = temp_pareto_front[selected_individuals]\n\n # Add latest Pareto front to full Pareto front\n pareto_front = np.hstack((pareto_front, temp_pareto_front))\n # Update unselected population ID by using sets to find IDs in all ids that are not in the selected front\n unselected_set = set(all_population_ids) - set(pareto_front)\n unselected_population_ids = np.array(list(unselected_set))\n\n population = population[pareto_front.astype(int)]\n scores = scores[pareto_front.astype(int)]\n return population, scores\n" ]
[ [ "numpy.hstack", "numpy.ones", "numpy.arange" ] ]
srinivasakumar-a/trading_calendars
[ "3e528d063531c78e0ac2d7dd374cd95a6d079584" ]
[ "trading_calendars/xbkk_holidays.py" ]
[ "from datetime import timedelta\n\nimport pandas as pd\nfrom pandas.tseries.holiday import (\n Holiday,\n next_monday_or_tuesday,\n sunday_to_monday,\n weekend_to_monday,\n)\nfrom pytz import UTC\n\nfrom .common_holidays import european_labour_day, new_years_day, new_years_eve\nfrom .trading_calendar import SUNDAY, MONDAY\n\n\ndef new_years_eve_observance(holidays):\n # New Year's Eve is a holiday every year except for 2003 for some reason.\n holidays = holidays[holidays.year != 2003]\n\n return pd.to_datetime([weekend_to_monday(day) for day in holidays])\n\n\ndef new_years_day_observance(holidays):\n # There was no extra observance of New Year's Day in 2006.\n holidays = holidays[holidays.year != 2006]\n\n return pd.to_datetime([next_monday_or_tuesday(day) for day in holidays])\n\n\ndef songkran_festival_last_day_observance(dt):\n \"\"\"\n This function is similar to the pandas function `next_monday_or_tuesday`\n except it does not observe Saturday holidays on Monday.\n \"\"\"\n if dt.weekday() == SUNDAY or dt.weekday() == MONDAY:\n return dt + timedelta(days=1)\n return dt\n\n\nNewYearsDay = new_years_day(observance=new_years_day_observance)\n\nChakriMemorialDay = Holiday(\n 'Chakri Memorial Day',\n month=4,\n day=6,\n observance=weekend_to_monday,\n)\n\n# Thai New Year. This does not follow the usual observe-next-trading-day rule.\nSongkranFestival1 = Holiday('Songkran Festival', month=4, day=13)\nSongkranFestival2 = Holiday(\n 'Songkran Festival',\n month=4,\n day=14,\n observance=sunday_to_monday,\n)\nSongkranFestival3 = Holiday(\n 'Songkran Festival',\n month=4,\n day=15,\n observance=songkran_festival_last_day_observance,\n)\n\nLabourDay = european_labour_day(observance=weekend_to_monday)\n\nCoronationDay2016AndBefore = Holiday(\n 'Coronation Day For King #9',\n month=5,\n day=5,\n observance=weekend_to_monday,\n end_date='2017',\n)\nCoronationDay2019AndAfter = Holiday(\n 'Coronation Day For King #10',\n month=5,\n day=4,\n observance=weekend_to_monday,\n start_date='2019',\n)\n\nHMQueensBirthday = Holiday(\n \"Her Majesty The Queen's Birthday\",\n month=6,\n day=3,\n observance=weekend_to_monday,\n start_date='2019',\n)\nHMKingsBirthday = Holiday(\n \"His Majesty The King's Birthday\",\n month=7,\n day=28,\n observance=weekend_to_monday,\n start_date='2017',\n)\nHMQueenMothersBirthday = Holiday(\n \"Her Majesty The Queen Mother's Birthday\",\n month=8,\n day=12,\n observance=weekend_to_monday,\n)\n\n# This holiday was historically used as a \"catch up\" day for the exchange, so\n# it does not need to follow the usual observe-next-trading-day rule.\nHalfYearHoliday = Holiday(\n 'Half Year Holiday',\n month=7,\n day=1,\n start_date='2002',\n end_date='2017',\n)\n\nThePassingOfKingBhumibol = Holiday(\n 'The Passing of King Bhumibol',\n month=10,\n day=13,\n observance=weekend_to_monday,\n start_date='2017',\n)\n\nChulalongkornDay = Holiday(\n 'Chulalongkorn Day',\n month=10,\n day=23,\n observance=weekend_to_monday,\n)\n\nKingBhumibolsBirthday = Holiday(\n \"King Bhumibol's Birthday\",\n month=12,\n day=5,\n observance=weekend_to_monday,\n)\n\nThailandConstitutionDay = Holiday(\n 'Thailand Constitution Day',\n month=12,\n day=10,\n observance=weekend_to_monday,\n)\n\nNewYearsEve = new_years_eve(observance=new_years_eve_observance)\n\n# Adhoc Holidays\n# --------------\n\nnew_years_bridge_days = [\n pd.Timestamp('2002-12-30', tz=UTC),\n pd.Timestamp('2004-01-02', tz=UTC),\n pd.Timestamp('2009-01-02', tz=UTC),\n pd.Timestamp('2013-12-30', tz=UTC),\n pd.Timestamp('2015-01-02', 
tz=UTC),\n]\n\nasanha_bucha_bridge_days = [\n pd.Timestamp('2009-07-06', tz=UTC),\n pd.Timestamp('2016-07-18', tz=UTC),\n]\n\nqueens_birthday_bridge_days = [\n pd.Timestamp('2010-08-13', tz=UTC),\n pd.Timestamp('2014-08-11', tz=UTC),\n]\n\ncoronation_bridge_days = [\n pd.Timestamp('2015-05-04', tz=UTC),\n pd.Timestamp('2016-05-06', tz=UTC),\n]\n\nvesak_bridge_days = [\n pd.Timestamp('2011-05-16', tz=UTC),\n]\n\nmisc_adhoc = [\n pd.Timestamp('2006-04-19', tz=UTC), # Special Holiday\n pd.Timestamp('2006-06-12', tz=UTC), # Special Holiday\n pd.Timestamp('2006-06-13', tz=UTC), # Special Holiday\n pd.Timestamp('2006-09-20', tz=UTC), # Exchange Holiday\n pd.Timestamp('2007-12-24', tz=UTC), # Exchange Holiday\n pd.Timestamp('2010-05-20', tz=UTC), # Closure Due to Security Concerns\n pd.Timestamp('2010-05-21', tz=UTC), # Closure Due to Security Concerns\n pd.Timestamp('2012-04-09', tz=UTC), # Bank Holiday\n pd.Timestamp('2017-10-26', tz=UTC), # Cremation of King Bhumibol\n]\n\n# Lunar Holidays\n# --------------\n\n# Makha Bucha (also known as Magha Puja) is celebrated on the day of the Full\n# Moon of Magha in the Buddhist calendar. This falls sometime between February\n# and March.\nmakha_bucha = pd.to_datetime([\n '1981-02-18',\n '1982-02-08',\n '1983-02-27',\n '1984-02-16',\n '1985-03-06',\n '1986-02-24',\n '1987-02-13',\n '1988-03-03',\n '1989-02-20',\n '1990-02-09',\n '1991-02-28',\n '1992-02-18',\n '1993-03-08',\n '1994-02-25',\n '1995-02-15',\n '1996-03-05',\n '1997-02-22',\n '1998-02-11',\n '1999-03-02',\n '2000-02-19',\n '2001-02-08',\n '2002-02-26',\n '2003-02-17',\n '2004-03-05',\n '2005-02-23',\n '2006-02-13',\n '2007-03-05',\n '2008-02-21',\n '2009-02-09',\n '2010-03-01',\n '2011-02-18',\n '2012-03-07',\n '2013-02-25',\n '2014-02-14',\n '2015-03-04',\n '2016-02-22',\n '2017-02-13',\n '2018-03-01',\n '2019-02-19',\n])\n\n# Vesak (also known as Buddha Day) is celebrated on the day of the Full Moon of\n# Visakha in the Buddhist calendar. This typically falls in May.\nvesak = pd.to_datetime([\n '1981-05-18',\n '1982-05-07',\n '1983-05-26',\n '1984-05-15',\n '1985-06-02',\n '1986-05-23',\n '1987-05-13',\n '1988-05-31',\n '1989-05-20',\n '1990-05-09',\n '1991-05-28',\n '1992-05-16',\n '1993-06-04',\n '1994-05-24',\n '1995-05-14',\n '1996-06-01',\n '1997-05-22',\n '1998-05-11',\n '1999-05-30',\n '2000-05-18',\n '2001-05-07',\n '2002-05-27',\n '2003-05-15',\n '2004-06-02',\n '2005-05-23',\n '2006-05-12',\n '2007-05-31',\n '2008-05-19',\n '2009-05-08',\n '2010-05-28',\n '2011-05-17',\n '2012-06-04',\n '2013-05-24',\n '2014-05-13',\n '2015-06-01',\n '2016-05-20',\n '2017-05-10',\n '2018-05-29',\n '2019-05-20',\n])\n\n# Asanha Bucha (also known as Asalha Puja) is celebrated on the day of the Full\n# Moon of Asadha in the Buddhist calendar. This typically falls in July.\nasanha_bucha = pd.to_datetime([\n '1981-07-17',\n '1982-07-06',\n '1983-07-24',\n '1984-07-12',\n '1985-07-31',\n '1986-07-21',\n '1987-07-10',\n '1988-07-28',\n '1989-07-18',\n '1990-07-07',\n '1991-07-26',\n '1992-07-14',\n '1993-08-02',\n '1994-07-22',\n '1995-07-12',\n '1996-07-30',\n '1997-07-19',\n '1998-07-09',\n '1999-07-28',\n '2000-07-16',\n '2001-07-05',\n '2002-07-25',\n '2003-07-14',\n '2004-08-02',\n '2005-07-22',\n '2006-07-11',\n '2007-07-30',\n '2008-07-17',\n '2009-07-07',\n '2010-07-26',\n '2011-07-15',\n '2012-08-02',\n '2013-07-22',\n '2014-07-11',\n '2015-07-30',\n '2016-07-19',\n '2017-07-10',\n '2018-07-27',\n '2019-07-16',\n])\n" ]
[ [ "pandas.to_datetime", "pandas.tseries.holiday.weekend_to_monday", "pandas.tseries.holiday.Holiday", "pandas.tseries.holiday.next_monday_or_tuesday", "pandas.Timestamp" ] ]
m-salewski/stay_classification
[ "e3f9deadf51c97029a0f9a4bb669a5af68abf7c6" ]
[ "src/stay_classification/bounding_box_classifier/bounding_box_classifier_maxloc.py" ]
[ "import numpy as np\n\nimport matplotlib.pyplot as plt\n\ndef get_max_loc_(d_thresh):\n \n \"\"\"\n Get the approximate location of the largest cluster, ie that with the most events\n \"\"\"\n \n def meth(arr):\n \n loc = None\n \n # Try the bins using the steps in the dist. thresh.\n # TODO: test if this matters? - a least in the early stages\n bins = np.arange(arr.min(), arr.max(), d_thresh)\n \n if bins.size > 1:\n hist_data, hist_bins = np.histogram(arr, bins=bins)\n else: \n hist_data, hist_bins = np.histogram(arr)\n \n if bins.size <= 1: \n # When the distance is too small\n hist_data, hist_bins = np.histogram(arr)\n max_bin = np.where(hist_data == hist_data.max())[0] \n loc = 0.5*(hist_bins[max_bin][0] + hist_bins[max_bin+1][0])\n \n else:\n # Here the bins are shifted to better approximate the location\n #NOTE: this might be overkill\n counts = 0\n best_loc = 0.0\n \n shift_frac = 1/2 # Could also use 1/3\n shift_intervals = 1\n shifts = range(2*shift_intervals+1)\n \n for n in shifts:\n \n # Shift the bins to maximize counts in a bin\n bins_ = bins+(n-shift_intervals)*d_thresh*shift_frac \n hist_data, hist_bins = np.histogram(arr, bins=bins_)\n \n # Save the location with the most counts\n max_counts = hist_data.max() \n max_bin = np.where(hist_data == max_counts)[0] \n #Since the chosen bin is the left edge, \n # to get the location, the midpoint is used.\n loc = 0.5*(hist_bins[max_bin][0] + hist_bins[max_bin+1][0])\n #print(max_counts, loc, hist_data, hist_bins)\n if max_counts > counts:\n best_loc = loc\n counts = max_counts\n \n loc = best_loc\n\n return loc\n\n \n return meth\n\ndef plot_max_loc_(d_thresh):\n \n plt.figure(figsize=[10,6])\n \n def meth(arr, ax=None):\n \n if ax == None:\n ax = plt.subplot(111)\n \n # Get the bins using the steps in the dist. thresh. \n bins = np.arange(arr.min(), arr.max(), d_thresh)\n \n if bins.size <= 1: \n bins = None\n\n hist_data = ax.hist(arr, bins=bins)\n hist_data, hist_bins = np.histogram(arr, bins=hist_data[1])\n\n #print(hist_data,hist_bins, hist_data.max())\n max_bin = np.where(hist_data == hist_data.max()) \n loc = hist_bins[max_bin][0] \n \n else:\n align = ['edge', 'center', 'edge']\n counts = 0\n best_loc = 0.0\n \n shift_frac = 1/2\n shift_intervals = 1\n shifts = range(2*shift_intervals+1)\n \n for n in shifts:\n \n # Shift the bins to maximize counts in a bin\n bins_ = bins+(n-shift_intervals)*d_thresh*shift_frac \n hist_data, hist_bins =np.histogram(arr, bins=bins_)\n \n width=d_thresh\n if n > 0: width=-d_thresh\n \n # Save the location with the most counts\n max_counts = hist_data.max() \n max_bin = np.where(hist_data == max_counts) \n loc = hist_bins[max_bin][0] \n if max_counts > counts:\n best_loc = loc\n counts = max_counts\n \n ax.bar(hist_bins[:-1], hist_data, width=d_thresh, \n align='center', alpha=0.5, \n label=f'{(n-shift_intervals)/3:5.2f}: max. loc. {loc:6.3f}')\n \n loc = best_loc \n \n ax.set_title(f\"Current location with max. events: {loc:6.3}\")\n ax.set_xlabel(rf\"$x$-bins ($\\Delta x$ ={d_thresh:6.4})[km]\")\n ax.set_ylabel(\"Counts\")\n ax.legend()\n \n return loc, counts, ax\n \n return meth\n" ]
[ [ "numpy.histogram", "matplotlib.pyplot.subplot", "numpy.where", "matplotlib.pyplot.figure" ] ]
JoonHyeongPark/IMMethyl
[ "bb4aa43475460b0ce0dd8b46606f3e61c672ee8f" ]
[ "CpG site Correlation/Separation Version/P_Code_Server4_PANCANCER.py" ]
[ "# -*- coding: utf-8 -*-\ncancerlist = [\"PANCANCER\"] # server1\n\nfrom operator import itemgetter\nfrom scipy import stats\nimport numpy as np\n\nbetavalue_arr = []\ncytoact_arr = []\n\nprobe_name = []\nsample_id = []\n\nstart_number = 180000\n\nSEP_NAME = \".SEP_4.\"\n\nprobe_count = 60000\nsample_count = 0\n\nprobe_separation_number = 1000\nprobe_iteration = 0\n\n######################################################################################################################################################\n\ndef getting_cytoact() :\n cytoact_file = open(\"TCGA_methylation_cowork_1.txt\", 'r')\n header = cytoact_file.readline().split() # header 읽기\n\n id_posit = header.index(\"id\") # sample ID positioning\n cytoact_posit = header.index(\"CytAct\") # CytAct positioning\n cytodata = cytoact_file.readlines() # 데이터 테이블 통째로 읽어들임\n cytoact_file.close()\n \n for line in cytodata :\n line = line.split()\n sample_id.append(line[id_posit].replace('_', '')) # sample ID 추출 (주형으로 사용할 것)\n \n sample_count = len(sample_id)\n \n for i in range(0, sample_count) : cytoact_arr.append(None) # CytAct value table 초기화\n\n for line in cytodata :\n line = line.split() # 1 sample data를 분절해서 CytAct value 추출하기 위함\n \n if(line[cytoact_posit] != \"NA\") : # CytAct value가 결측치가 아니라면\n sample_posit = sample_id.index(line[id_posit].replace('_', ''))\n cytoact_arr[sample_posit] = float(line[cytoact_posit]) # 저장한다\n return;\n\n######################################################################################################################################################\n\ngetting_cytoact()\nprint(\"CytAct_Completed\")\n\n######################################################################################################################################################\n\ndef reset_betavalue() :\n \n del betavalue_arr[:]\n for reset_x in range(0, probe_separation_number) : betavalue_arr.append({})\n \n return\n\n######################################################################################################################################################\n\nimport math\ndef mean(x):\n sum = 0.0\n for i in x: sum += i\n return sum / len(x) \n\ndef sampleStandardDeviation(x):\n sumv = 0.0\n for i in x:\n sumv += (i - mean(x)) ** 2\n return math.sqrt(sumv / (len(x) - 1))\n\ndef pearson(x, y):\n scorex = []\n scorey = []\n\n for i in x: scorex.append((i - mean(x)) / sampleStandardDeviation(x)) \n for j in y: scorey.append((j - mean(y)) / sampleStandardDeviation(y))\n\n return (sum([i * j for i, j in zip(scorex, scorey)])) / (len(x) - 1)\n\n######################################################################################################################################################\n\ndef getting_betavalue(name) :\n \n tumor_pearson_output = open(name + SEP_NAME + \"Tumor_Cor_CpGsite&CytAct_pearson.txt\", 'w'); tumor_spearman_output = open(name + SEP_NAME + \".Tumor_Cor_CpGSite&CytAct_spearman.txt\", 'w')\n tumor_pearson_output.write(\"CpGsite\\t%s\\tP_value\\n\" % name); tumor_spearman_output.write(\"CpGsite\\t%s\\tP_value\\n\" % name)\n \n filename1 = name + \".humanmethylation450.tumor.txt\" # cancer name별로 파일명이 다름을 고려해줌\n# filename2 = name + \".humanmethylation450.normal.txt\" # cancer name별로 파일명이 다름을 고려해줌\n input_file1 = open(filename1, 'r')\n# input_file2 = open(filename2, 'r')\n \n sample_name1= input_file1.readline().split() # header에 sample ID가 있으므로 별도로 읽어준 후\n# sample_name2 = input_file2.readline().split()\n\n for separation_count in range(0, start_number) : input_file1.readline()\n \n del 
sample_name1[0]; del sample_name1[0]\n# del sample_name2[0]; del sample_name2[0] # 쓰레기 데이터를 제외\n \n input_file1.readline() # betavalue임을 명시하는 row를 읽고 폐기\n# input_file2.readline()\n \n probe_separation_number_copy = probe_separation_number\n\n i = 0\n while i < probe_count :\n \n ##################################################################################################################################################\n del probe_name[:]\n escape = False\n \n for normal_iteration in range(0, probe_separation_number_copy) : \n line1 = input_file1.readline().split() # 한 probe에 대한 여러 sample ID의 betavalue를 읽어들임\n# line2 = input_file2.readline().split()\n \n if(len(line1) == 0) : # 끝까지 읽은 경우 루프 문을 빠져나감\n escape = True; probe_separation_number_copy = normal_iteration\n break \n \n# while line1[0] != line2[0] : # 관심 있는 probe와 같아질 때까지 normal data를 훑어 내려감\n# ############ 관심 없는 probe이므로 출력해준 뒤 Not interested 표시\n# tumor_pearson_output.write(\"%s\\tNot interested\\n\" % line2[0]); tumor_spearman_output.write(\"%s\\tNot interested\\n\" % line2[0])\n# line2 = input_file2.readline().split() # 새로운 probe를 읽어들임\n\n probe_name.append(line1[0]) # CpG site name 추출한 뒤, betavalue가 아니므로 리스트에서 제거\n del line1[0]; #del line2[0]\n \n ##################################################################################################################################################\n \n for j in range(0, len(line1)) : # sample ID의 개수만큼 반복함\n sample_name1[j] = sample_name1[j][:15].replace('-', '') # sample ID의 형식을 통일해줌\n\n if(line1[j] != \"NA\" and sample_name1[j] in sample_id) : # 결측치가 아니고, sample id에 포함된 경우\n betavalue_arr[normal_iteration][sample_name1[j]] = [float(line1[j]), None] # sample ID에 맞는 index에 betavalue 저장\n \n ##################################################################################################################################################\n \n# for j in range(0, len(line2)) : # sample ID의 개수만큼 반복함\n# sample_name2[j] = sample_name2[j][:15].replace('-', '') # sample ID의 형식을 통일해줌\n\n# if(line2[j] != \"NA\" and sample_name2[j] in sample_name1) : # 결측치가 아니고, sample id에 포함된 경우\n# betavalue_arr[normal_iteration][sample_name2[j]][1] = float(line2[j]) # sample ID에 맞는 index에 betavalue 저장\n \n ##################################################################################################################################################\n \n for j in range(0, probe_separation_number_copy) :\n printline1 = \"%s\\t\" % probe_name[j]; printline2 = \"%s\\t\" % probe_name[j]\n \n tumor = [None, None]; correction_FC = [None, None]; correction_minus = [None, None]\n tumor[0] = list(); tumor[1] = list()\n check_tumor = False\n\n iteration_number = len(betavalue_arr[j])\n each_site_sample = betavalue_arr[j].items()\n \n for k in range(0, iteration_number) :\n \n if(each_site_sample[k][1][0] != None) :\n \n tumor[0].append(float(each_site_sample[k][1][0])); tumor[1].append(float(cytoact_arr[sample_id.index(each_site_sample[k][0])]))\n check_tumor = True\n \n \n if(check_tumor) :\n tumor_pearson_pair = stats.pearsonr(tumor[0], tumor[1]); tumor_spearman_pair = stats.spearmanr(tumor[0], tumor[1])\n printline1 += \"%f\\t%.3f\" % (tumor_pearson_pair[0], tumor_pearson_pair[1]); printline2 += \"%f\\t%.3f\" % (tumor_spearman_pair[0], tumor_spearman_pair[1])\n \n else : printline1 += \"NA\\tNA\"; printline2 += \"NA\\tNA\"\n \n printline1 += \"\\n\"; printline2 += \"\\n\"\n \n tumor_pearson_output.write(printline1)\n tumor_spearman_output.write(printline2)\n \n if(escape) : break\n \n 
##################################################################################################################################################\n \n i += probe_separation_number\n\n print(i, probe_separation_number, \"We are processing %s\" % SEP_NAME)\n \n input_file1.close(); #input_file2.close()\n \n tumor_pearson_output.close(); tumor_spearman_output.close()\n \n return\n\ndef process(cancer_name) :\n \n reset_betavalue()\n getting_betavalue(cancer_name)\n \n return\n\nfor cancer_name in cancerlist :\n process(cancer_name); print(cancer_name + \" completed\")\n\nprint(\"END\")\n" ]
[ [ "scipy.stats.spearmanr", "scipy.stats.pearsonr" ] ]
jordyantunes/Imagine
[ "783cedaa53635b21e18ef41ab1524d56e368d120" ]
[ "src/imagine/rl/actor_critic.py" ]
[ "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport numpy as np\nfrom torch.distributions import Normal\n\n\"\"\"\nthe input x in both networks should be [o, g], where o is the observation and g is the goal.\n\n\"\"\"\n\nclass Feedforward(torch.nn.Module):\n def __init__(self, input_size, layers_sizes):\n super(Feedforward, self).__init__()\n self.input_size = input_size\n self.layers_sizes = layers_sizes\n self.fc_layers = nn.ModuleList()\n self.activations = []\n\n size_tmp = self.input_size\n for i, size in enumerate(self.layers_sizes):\n self.activations.append(nn.ReLU() if i < len(self.layers_sizes) - 1 else None)\n fc = torch.nn.Linear(size_tmp, size)\n nn.init.kaiming_uniform_(fc.weight)\n nn.init.zeros_(fc.bias)\n self.fc_layers.append(fc)\n size_tmp = size\n\n def forward(self, input):\n for fc, activation in zip(self.fc_layers, self.activations):\n input = fc(input)\n if activation:\n input = activation(input)\n return input\n\n\nclass Actor(nn.Module):\n \"\"\"\n Deepset implementation of Actor network\n \"\"\"\n def __init__(self, dims, layers, hidden):\n super(Actor, self).__init__()\n self.layers = layers\n self.dimo = dims['obs']\n self.dimg = dims['g_encoding']\n self.dimu = dims['acts']\n self.inds_objs = dims['inds_objs']\n self.hidden = hidden\n\n self.half_o = self.dimo // 2\n self.n_objs = len(self.inds_objs)\n self.dim_obj = 2 * len(self.inds_objs[0])\n self.dim_body = self.inds_objs[0][0] * 2\n\n self.fc_cast = Feedforward(self.dimg, [self.dim_body + self.dim_obj])\n self.fc_actor = Feedforward(self.dim_body + self.dim_obj,\n [self.hidden] + [self.n_objs * (self.dim_obj + self.dim_body)])\n self.fc_pi = Feedforward(self.n_objs * (self.dim_obj + self.dim_body),\n [self.hidden] * self.layers + [self.dimu])\n\n self.sigmoid = nn.Sigmoid()\n self.tanh = nn.Tanh()\n\n def forward(self, o, g):\n attention = F.sigmoid(self.fc_cast(g))\n obs_body = torch.cat(tensors=[o[:, :self.inds_objs[0][0]],\n o[:, self.half_o: self.half_o + self.inds_objs[0][0]]], dim=1)\n input_pi = torch.zeros([len(o), self.n_objs * (self.dim_obj + self.dim_body)])\n for i in range(self.n_objs):\n obs_obj = torch.cat(tensors=[o[:, self.inds_objs[i][0]: self.inds_objs[i][-1] + 1],\n o[:,\n self.inds_objs[i][0] + self.half_o: self.inds_objs[i][-1] + 1 + self.half_o]],\n dim=1)\n body_obj_input = torch.cat(dim=1, tensors=[obs_body, obs_obj])\n deepset_input = torch.mul(body_obj_input, attention)\n\n input_obj = F.relu(self.fc_actor(deepset_input))\n input_pi += input_obj\n\n return self.tanh(self.fc_pi(input_pi))\n\n def get_attention(self, g):\n return self.sigmoid(self.fc_cast(g))\n\n def get_norm_per_object(self, o , g):\n attention = F.sigmoid(self.fc_cast(g))\n obs_body = torch.cat(tensors=[o[:, :self.inds_objs[0][0]],\n o[:, self.half_o: self.half_o + self.inds_objs[0][0]]], dim=1)\n norms_per_object = []\n for i in range(self.n_objs):\n obs_obj = torch.cat(tensors=[o[:, self.inds_objs[i][0]: self.inds_objs[i][-1] + 1],\n o[:,\n self.inds_objs[i][0] + self.half_o: self.inds_objs[i][-1] + 1 + self.half_o]],\n dim=1)\n body_obj_input = torch.cat(dim=1, tensors=[obs_body, obs_obj])\n deepset_input = torch.mul(body_obj_input, attention)\n\n norms_per_object.append(torch.mean(F.relu(self.fc_actor(deepset_input))))\n return norms_per_object\n\n\n def load_from_tf_params(self, params_dict, name='main'):\n self.fc_cast.fc_layers[0].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/pi/attention_0/kernel:0'.format(name)]),\n dtype=torch.float32))\n 
self.fc_cast.fc_layers[0].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/pi/attention_0/bias:0'.format(name)], dtype=torch.float32))\n self.fc_actor.fc_layers[0].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/pi/obj_0/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_actor.fc_layers[0].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/pi/obj_0/bias:0'.format(name)], dtype=torch.float32))\n self.fc_actor.fc_layers[1].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/pi/obj_1/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_actor.fc_layers[1].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/pi/obj_1/bias:0'.format(name)], dtype=torch.float32))\n self.fc_pi.fc_layers[0].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/pi/pi_0/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_pi.fc_layers[0].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/pi/pi_0/bias:0'.format(name)], dtype=torch.float32))\n self.fc_pi.fc_layers[1].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/pi/pi_1/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_pi.fc_layers[1].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/pi/pi_1/bias:0'.format(name)], dtype=torch.float32))\n\n\n\nclass Critic(nn.Module):\n \"\"\"\n Deepset implementation of Critic network\n \"\"\"\n def __init__(self, dims, layers, hidden):\n super(Critic, self).__init__()\n self.layers = layers\n self.dimo = dims['obs']\n self.dimg = dims['g_encoding']\n self.dimu = dims['acts']\n self.inds_objs = dims['inds_objs']\n self.hidden = hidden\n\n self.half_o = self.dimo // 2\n self.n_objs = len(self.inds_objs)\n self.dim_obj = 2 * len(self.inds_objs[0])\n self.dim_body = self.inds_objs[0][0] * 2\n\n self.fc_cast = Feedforward(self.dimg, [self.dim_body + self.dim_obj + self.dimu])\n self.fc_critic = Feedforward(self.dim_body + self.dim_obj + self.dimu,\n [self.hidden] + [self.n_objs * (self.dim_obj + self.dim_body + self.dimu)])\n self.fc_Q = Feedforward(self.n_objs * (self.dim_obj + self.dim_body + self.dimu),\n [self.hidden] * self.layers + [1])\n\n self.sigmoid = nn.Sigmoid()\n self.tanh = nn.Tanh()\n\n def forward(self, o, g, actions):\n attention = F.sigmoid(self.fc_cast(g))\n obs_body = torch.cat(tensors=[o[:, :self.inds_objs[0][0]],\n o[:, self.half_o: self.half_o + self.inds_objs[0][0]]], dim=1)\n input_Q = torch.zeros([len(o), self.n_objs * (self.dim_obj + self.dim_body + self.dimu)])\n for i in range(self.n_objs):\n obs_obj = torch.cat(tensors=[o[:, self.inds_objs[i][0]: self.inds_objs[i][-1] + 1],\n o[:,\n self.inds_objs[i][0] + self.half_o: self.inds_objs[i][-1] + 1 + self.half_o]],\n dim=1)\n body_obj_act_input = torch.cat(dim=1, tensors=[obs_body, obs_obj, actions])\n deepset_input = torch.mul(body_obj_act_input, attention)\n\n input_obj = F.relu(self.fc_critic(deepset_input))\n input_Q += input_obj\n\n return self.fc_Q(input_Q)\n\n def get_attention(self, g):\n return self.sigmoid(self.fc_cast(g))\n\n def load_from_tf_params(self, params_dict, name='main'):\n self.fc_cast.fc_layers[0].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/Q/attention_0/kernel:0'.format(name)]),\n dtype=torch.float32))\n self.fc_cast.fc_layers[0].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/Q/attention_0/bias:0'.format(name)], dtype=torch.float32))\n self.fc_critic.fc_layers[0].weight = torch.nn.Parameter(\n 
torch.tensor(np.transpose(params_dict['ddpg/{}/Q/obj_0/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_critic.fc_layers[0].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/Q/obj_0/bias:0'.format(name)], dtype=torch.float32))\n self.fc_critic.fc_layers[1].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/Q/obj_1/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_critic.fc_layers[1].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/Q/obj_1/bias:0'.format(name)], dtype=torch.float32))\n self.fc_Q.fc_layers[0].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/Q/critic_0/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_Q.fc_layers[0].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/Q/critic_0/bias:0'.format(name)], dtype=torch.float32))\n self.fc_Q.fc_layers[1].weight = torch.nn.Parameter(\n torch.tensor(np.transpose(params_dict['ddpg/{}/Q/critic_1/kernel:0'.format(name)]), dtype=torch.float32))\n self.fc_Q.fc_layers[1].bias = torch.nn.Parameter(\n torch.tensor(params_dict['ddpg/{}/Q/critic_1/bias:0'.format(name)], dtype=torch.float32))\n" ]
[ [ "torch.nn.Linear", "torch.cat", "torch.nn.init.kaiming_uniform_", "torch.mul", "torch.nn.ModuleList", "torch.nn.Sigmoid", "torch.nn.Tanh", "torch.nn.ReLU", "torch.nn.init.zeros_" ] ]
ianbtr/pandas
[ "e4c17f79ba7fc07bf7a3d66e4637a1eb3cdfea4d" ]
[ "pandas/core/series.py" ]
[ "\"\"\"\nData structure for 1-dimensional cross-sectional and time series data\n\"\"\"\nfrom io import StringIO\nfrom shutil import get_terminal_size\nfrom textwrap import dedent\nfrom typing import (\n IO,\n TYPE_CHECKING,\n Any,\n Callable,\n Iterable,\n List,\n Optional,\n Tuple,\n Type,\n)\nimport warnings\n\nimport numpy as np\n\nfrom pandas._config import get_option\n\nfrom pandas._libs import lib, properties, reshape, tslibs\nfrom pandas._typing import Axis, DtypeObj, Label\nfrom pandas.compat.numpy import function as nv\nfrom pandas.util._decorators import Appender, Substitution, doc\nfrom pandas.util._validators import validate_bool_kwarg, validate_percentile\n\nfrom pandas.core.dtypes.cast import (\n convert_dtypes,\n maybe_cast_to_extension_array,\n validate_numeric_casting,\n)\nfrom pandas.core.dtypes.common import (\n _is_unorderable_exception,\n ensure_platform_int,\n is_bool,\n is_categorical_dtype,\n is_dict_like,\n is_extension_array_dtype,\n is_integer,\n is_iterator,\n is_list_like,\n is_object_dtype,\n is_scalar,\n)\nfrom pandas.core.dtypes.generic import (\n ABCDataFrame,\n ABCDatetimeIndex,\n ABCMultiIndex,\n ABCPeriodIndex,\n ABCSeries,\n)\nfrom pandas.core.dtypes.inference import is_hashable\nfrom pandas.core.dtypes.missing import (\n isna,\n na_value_for_dtype,\n notna,\n remove_na_arraylike,\n)\n\nimport pandas as pd\nfrom pandas.core import algorithms, base, generic, nanops, ops\nfrom pandas.core.accessor import CachedAccessor\nfrom pandas.core.arrays import ExtensionArray\nfrom pandas.core.arrays.categorical import CategoricalAccessor\nfrom pandas.core.arrays.sparse import SparseAccessor\nimport pandas.core.common as com\nfrom pandas.core.construction import (\n create_series_with_explicit_dtype,\n extract_array,\n is_empty_data,\n sanitize_array,\n)\nfrom pandas.core.generic import NDFrame\nfrom pandas.core.indexers import unpack_1tuple\nfrom pandas.core.indexes.accessors import CombinedDatetimelikeProperties\nfrom pandas.core.indexes.api import (\n Float64Index,\n Index,\n IntervalIndex,\n InvalidIndexError,\n MultiIndex,\n ensure_index,\n)\nimport pandas.core.indexes.base as ibase\nfrom pandas.core.indexes.datetimes import DatetimeIndex\nfrom pandas.core.indexes.period import PeriodIndex\nfrom pandas.core.indexes.timedeltas import TimedeltaIndex\nfrom pandas.core.indexing import check_bool_indexer\nfrom pandas.core.internals import SingleBlockManager\nfrom pandas.core.strings import StringMethods\nfrom pandas.core.tools.datetimes import to_datetime\n\nimport pandas.io.formats.format as fmt\nimport pandas.plotting\n\nif TYPE_CHECKING:\n from pandas.core.frame import DataFrame\n from pandas.core.groupby.generic import SeriesGroupBy\n\n__all__ = [\"Series\"]\n\n_shared_doc_kwargs = dict(\n axes=\"index\",\n klass=\"Series\",\n axes_single_arg=\"{0 or 'index'}\",\n axis=\"\"\"axis : {0 or 'index'}\n Parameter needed for compatibility with DataFrame.\"\"\",\n inplace=\"\"\"inplace : boolean, default False\n If True, performs operation inplace and returns None.\"\"\",\n unique=\"np.ndarray\",\n duplicated=\"Series\",\n optional_by=\"\",\n optional_mapper=\"\",\n optional_labels=\"\",\n optional_axis=\"\",\n versionadded_to_excel=\"\\n .. 
versionadded:: 0.20.0\\n\",\n)\n\n\ndef _coerce_method(converter):\n \"\"\"\n Install the scalar coercion methods.\n \"\"\"\n\n def wrapper(self):\n if len(self) == 1:\n return converter(self.iloc[0])\n raise TypeError(f\"cannot convert the series to {converter}\")\n\n wrapper.__name__ = f\"__{converter.__name__}__\"\n return wrapper\n\n\n# ----------------------------------------------------------------------\n# Series class\n\n\nclass Series(base.IndexOpsMixin, generic.NDFrame):\n \"\"\"\n One-dimensional ndarray with axis labels (including time series).\n\n Labels need not be unique but must be a hashable type. The object\n supports both integer- and label-based indexing and provides a host of\n methods for performing operations involving the index. Statistical\n methods from ndarray have been overridden to automatically exclude\n missing data (currently represented as NaN).\n\n Operations between Series (+, -, /, *, **) align values based on their\n associated index values-- they need not be the same length. The result\n index will be the sorted union of the two indexes.\n\n Parameters\n ----------\n data : array-like, Iterable, dict, or scalar value\n Contains data stored in Series.\n\n .. versionchanged:: 0.23.0\n If data is a dict, argument order is maintained for Python 3.6\n and later.\n\n index : array-like or Index (1d)\n Values must be hashable and have the same length as `data`.\n Non-unique index values are allowed. Will default to\n RangeIndex (0, 1, 2, ..., n) if not provided. If both a dict and index\n sequence are used, the index will override the keys found in the\n dict.\n dtype : str, numpy.dtype, or ExtensionDtype, optional\n Data type for the output Series. If not specified, this will be\n inferred from `data`.\n See the :ref:`user guide <basics.dtypes>` for more usages.\n name : str, optional\n The name to give to the Series.\n copy : bool, default False\n Copy input data.\n \"\"\"\n\n _typ = \"series\"\n\n _name: Label\n _metadata: List[str] = [\"name\"]\n _internal_names_set = {\"index\"} | generic.NDFrame._internal_names_set\n _accessors = {\"dt\", \"cat\", \"str\", \"sparse\"}\n _deprecations = (\n base.IndexOpsMixin._deprecations\n | generic.NDFrame._deprecations\n | frozenset([\"compress\", \"ptp\"])\n )\n\n # Override cache_readonly bc Series is mutable\n hasnans = property(\n base.IndexOpsMixin.hasnans.func, doc=base.IndexOpsMixin.hasnans.__doc__\n )\n _data: SingleBlockManager\n div: Callable[[\"Series\", Any], \"Series\"]\n rdiv: Callable[[\"Series\", Any], \"Series\"]\n\n # ----------------------------------------------------------------------\n # Constructors\n\n def __init__(\n self, data=None, index=None, dtype=None, name=None, copy=False, fastpath=False\n ):\n\n # we are called internally, so short-circuit\n if fastpath:\n\n # data is an ndarray, index is defined\n if not isinstance(data, SingleBlockManager):\n data = SingleBlockManager.from_array(data, index)\n if copy:\n data = data.copy()\n if index is None:\n index = data.index\n\n else:\n\n name = ibase.maybe_extract_name(name, data, type(self))\n\n if is_empty_data(data) and dtype is None:\n # gh-17261\n warnings.warn(\n \"The default dtype for empty Series will be 'object' instead \"\n \"of 'float64' in a future version. 
Specify a dtype explicitly \"\n \"to silence this warning.\",\n DeprecationWarning,\n stacklevel=2,\n )\n # uncomment the line below when removing the DeprecationWarning\n # dtype = np.dtype(object)\n\n if index is not None:\n index = ensure_index(index)\n\n if data is None:\n data = {}\n if dtype is not None:\n dtype = self._validate_dtype(dtype)\n\n if isinstance(data, MultiIndex):\n raise NotImplementedError(\n \"initializing a Series from a MultiIndex is not supported\"\n )\n elif isinstance(data, Index):\n\n if dtype is not None:\n # astype copies\n data = data.astype(dtype)\n else:\n # need to copy to avoid aliasing issues\n data = data._values.copy()\n if isinstance(data, ABCDatetimeIndex) and data.tz is not None:\n # GH#24096 need copy to be deep for datetime64tz case\n # TODO: See if we can avoid these copies\n data = data._values.copy(deep=True)\n copy = False\n\n elif isinstance(data, np.ndarray):\n if len(data.dtype):\n # GH#13296 we are dealing with a compound dtype, which\n # should be treated as 2D\n raise ValueError(\n \"Cannot construct a Series from an ndarray with \"\n \"compound dtype. Use DataFrame instead.\"\n )\n pass\n elif isinstance(data, ABCSeries):\n if index is None:\n index = data.index\n else:\n data = data.reindex(index, copy=copy)\n data = data._data\n elif is_dict_like(data):\n data, index = self._init_dict(data, index, dtype)\n dtype = None\n copy = False\n elif isinstance(data, SingleBlockManager):\n if index is None:\n index = data.index\n elif not data.index.equals(index) or copy:\n # GH#19275 SingleBlockManager input should only be called\n # internally\n raise AssertionError(\n \"Cannot pass both SingleBlockManager \"\n \"`data` argument and a different \"\n \"`index` argument. `copy` must be False.\"\n )\n\n elif is_extension_array_dtype(data):\n pass\n elif isinstance(data, (set, frozenset)):\n raise TypeError(f\"'{type(data).__name__}' type is unordered\")\n else:\n data = com.maybe_iterable_to_list(data)\n\n if index is None:\n if not is_list_like(data):\n data = [data]\n index = ibase.default_index(len(data))\n elif is_list_like(data):\n\n # a scalar numpy array is list-like but doesn't\n # have a proper length\n try:\n if len(index) != len(data):\n raise ValueError(\n f\"Length of passed values is {len(data)}, \"\n f\"index implies {len(index)}.\"\n )\n except TypeError:\n pass\n\n # create/copy the manager\n if isinstance(data, SingleBlockManager):\n if dtype is not None:\n data = data.astype(dtype=dtype, errors=\"ignore\", copy=copy)\n elif copy:\n data = data.copy()\n else:\n data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)\n\n data = SingleBlockManager.from_array(data, index)\n\n generic.NDFrame.__init__(self, data)\n self.name = name\n self._set_axis(0, index, fastpath=True)\n\n def _init_dict(self, data, index=None, dtype=None):\n \"\"\"\n Derive the \"_data\" and \"index\" attributes of a new Series from a\n dictionary input.\n\n Parameters\n ----------\n data : dict or dict-like\n Data used to populate the new Series.\n index : Index or index-like, default None\n Index for the new Series: if None, use dict keys.\n dtype : dtype, default None\n The dtype for the new Series: if None, infer from data.\n\n Returns\n -------\n _data : BlockManager for the new Series\n index : index for the new Series\n \"\"\"\n # Looking for NaN in dict doesn't work ({np.nan : 1}[float('nan')]\n # raises KeyError), so we iterate the entire dict, and align\n if data:\n keys, values = zip(*data.items())\n values = list(values)\n elif 
index is not None:\n            # fastpath for Series(data=None). Just use broadcasting a scalar\n            # instead of reindexing.\n            values = na_value_for_dtype(dtype)\n            keys = index\n        else:\n            keys, values = [], []\n\n        # Input is now list-like, so rely on \"standard\" construction:\n\n        # TODO: passing np.float64 to not break anything yet. See GH-17261\n        s = create_series_with_explicit_dtype(\n            values, index=keys, dtype=dtype, dtype_if_empty=np.float64\n        )\n\n        # Now we just make sure the order is respected, if any\n        if data and index is not None:\n            s = s.reindex(index, copy=False)\n        return s._data, s.index\n\n    # ----------------------------------------------------------------------\n\n    @property\n    def _constructor(self) -> Type[\"Series\"]:\n        return Series\n\n    @property\n    def _constructor_expanddim(self) -> Type[\"DataFrame\"]:\n        from pandas.core.frame import DataFrame\n\n        return DataFrame\n\n    # types\n    @property\n    def _can_hold_na(self):\n        return self._data._can_hold_na\n\n    _index = None\n\n    def _set_axis(self, axis: int, labels, fastpath: bool = False) -> None:\n        \"\"\"\n        Override generic, we want to set the _typ here.\n\n        This is called from the cython code when we set the `index` attribute\n        directly, e.g. `series.index = [1, 2, 3]`.\n        \"\"\"\n        if not fastpath:\n            labels = ensure_index(labels)\n\n        is_all_dates = labels.is_all_dates\n        if is_all_dates:\n            if not isinstance(labels, (DatetimeIndex, PeriodIndex, TimedeltaIndex)):\n                try:\n                    labels = DatetimeIndex(labels)\n                    # need to set here because we changed the index\n                    if fastpath:\n                        self._data.set_axis(axis, labels)\n                except (tslibs.OutOfBoundsDatetime, ValueError):\n                    # labels may exceed datetime bounds,\n                    # or not be a DatetimeIndex\n                    pass\n\n        object.__setattr__(self, \"_index\", labels)\n        if not fastpath:\n            # The ensure_index call above ensures we have an Index object\n            self._data.set_axis(axis, labels)\n\n    # ndarray compatibility\n    @property\n    def dtype(self) -> DtypeObj:\n        \"\"\"\n        Return the dtype object of the underlying data.\n        \"\"\"\n        return self._data.dtype\n\n    @property\n    def dtypes(self) -> DtypeObj:\n        \"\"\"\n        Return the dtype object of the underlying data.\n        \"\"\"\n        # DataFrame compatibility\n        return self.dtype\n\n    @property\n    def name(self) -> Label:\n        \"\"\"\n        Return the name of the Series.\n\n        The name of a Series becomes its index or column name if it is used\n        to form a DataFrame. It is also used whenever displaying the Series\n        using the interpreter.\n\n        Returns\n        -------\n        label (hashable object)\n            The name of the Series, also the column name if part of a DataFrame.\n\n        See Also\n        --------\n        Series.rename : Sets the Series name when given a scalar input.\n        Index.name : Corresponding Index property.\n\n        Examples\n        --------\n        The Series name can be set initially when calling the constructor.\n\n        >>> s = pd.Series([1, 2, 3], dtype=np.int64, name='Numbers')\n        >>> s\n        0    1\n        1    2\n        2    3\n        Name: Numbers, dtype: int64\n        >>> s.name = \"Integers\"\n        >>> s\n        0    1\n        1    2\n        2    3\n        Name: Integers, dtype: int64\n\n        The name of a Series within a DataFrame is its column name.\n\n        >>> df = pd.DataFrame([[1, 2], [3, 4], [5, 6]],\n        ...                   columns=[\"Odd Numbers\", \"Even Numbers\"])\n        >>> df\n           Odd Numbers  Even Numbers\n        0            1             2\n        1            3             4\n        2            5             6\n        >>> df[\"Even Numbers\"].name\n        'Even Numbers'\n        \"\"\"\n        return self._name\n\n    @name.setter\n    def name(self, value: Label) -> None:\n        if not is_hashable(value):\n            raise TypeError(\"Series.name must be a hashable type\")\n        object.__setattr__(self, \"_name\", value)\n\n    @property\n    def values(self):\n        \"\"\"\n        Return Series as ndarray or ndarray-like depending on the dtype.\n\n        .. warning::\n\n           We recommend using :attr:`Series.array` or\n           :meth:`Series.to_numpy`, depending on whether you need\n           a reference to the underlying data or a NumPy array.\n\n        Returns\n        -------\n        numpy.ndarray or ndarray-like\n\n        See Also\n        --------\n        Series.array : Reference to the underlying data.\n        Series.to_numpy : A NumPy array representing the underlying data.\n\n        Examples\n        --------\n        >>> pd.Series([1, 2, 3]).values\n        array([1, 2, 3])\n\n        >>> pd.Series(list('aabc')).values\n        array(['a', 'a', 'b', 'c'], dtype=object)\n\n        >>> pd.Series(list('aabc')).astype('category').values\n        [a, a, b, c]\n        Categories (3, object): [a, b, c]\n\n        Timezone aware datetime data is converted to UTC:\n\n        >>> pd.Series(pd.date_range('20130101', periods=3,\n        ...                         tz='US/Eastern')).values\n        array(['2013-01-01T05:00:00.000000000',\n               '2013-01-02T05:00:00.000000000',\n               '2013-01-03T05:00:00.000000000'], dtype='datetime64[ns]')\n        \"\"\"\n        return self._data.external_values()\n\n    @property\n    def _values(self):\n        \"\"\"\n        Return the internal repr of this data (defined by Block.interval_values).\n        These are the values as stored in the Block (ndarray or ExtensionArray\n        depending on the Block class), with datetime64[ns] and timedelta64[ns]\n        wrapped in ExtensionArrays to match Index._values behavior.\n\n        Differs from the public ``.values`` for certain data types, because of\n        historical backwards compatibility of the public attribute (e.g. period\n        returns object ndarray and datetimetz a datetime64[ns] ndarray for\n        ``.values`` while it returns an ExtensionArray for ``._values`` in those\n        cases).\n\n        Differs from ``.array`` in that this still returns the numpy array if\n        the Block is backed by a numpy array (except for datetime64 and\n        timedelta64 dtypes), while ``.array`` ensures to always return an\n        ExtensionArray.\n\n        Overview:\n\n        dtype       | values        | _values       | array         |\n        ----------- | ------------- | ------------- | ------------- |\n        Numeric     | ndarray       | ndarray       | PandasArray   |\n        Category    | Categorical   | Categorical   | Categorical   |\n        dt64[ns]    | ndarray[M8ns] | DatetimeArray | DatetimeArray |\n        dt64[ns tz] | ndarray[M8ns] | DatetimeArray | DatetimeArray |\n        td64[ns]    | ndarray[m8ns] | TimedeltaArray| ndarray[m8ns] |\n        Period      | ndarray[obj]  | PeriodArray   | PeriodArray   |\n        Nullable    | EA            | EA            | EA            |\n\n        \"\"\"\n        return self._data.internal_values()\n\n    @Appender(base.IndexOpsMixin.array.__doc__)  # type: ignore\n    @property\n    def array(self) -> ExtensionArray:\n        return self._data._block.array_values()\n\n    # ops\n    def ravel(self, order=\"C\"):\n        \"\"\"\n        Return the flattened underlying data as an ndarray.\n\n        Returns\n        -------\n        numpy.ndarray or ndarray-like\n            Flattened data of the Series.\n\n        See Also\n        --------\n        numpy.ndarray.ravel : Return a flattened array.\n        \"\"\"\n        return self._values.ravel(order=order)\n\n    def __len__(self) -> int:\n        \"\"\"\n        Return the length of the Series.\n        \"\"\"\n        return len(self._data)\n\n    def view(self, dtype=None) -> \"Series\":\n        \"\"\"\n        Create a new view of the Series.\n\n        This function will return a new Series with a view of the same\n        underlying values in memory, optionally reinterpreted with a new data\n        type. The new data type must preserve the same size in bytes as to not\n        cause index misalignment.\n\n        Parameters\n        ----------\n        dtype : data type\n            Data type object or one of their string representations.\n\n        Returns\n        -------\n        Series\n            A new Series object as a view of the same data in memory.\n\n        See Also\n        --------\n        numpy.ndarray.view : Equivalent numpy function to create a new view of\n            the same data in memory.\n\n        Notes\n        -----\n        Series are instantiated with ``dtype=float64`` by default.
While\n ``numpy.ndarray.view()`` will return a view with the same data type as\n the original array, ``Series.view()`` (without specified dtype)\n will try using ``float64`` and may fail if the original data type size\n in bytes is not the same.\n\n Examples\n --------\n >>> s = pd.Series([-2, -1, 0, 1, 2], dtype='int8')\n >>> s\n 0 -2\n 1 -1\n 2 0\n 3 1\n 4 2\n dtype: int8\n\n The 8 bit signed integer representation of `-1` is `0b11111111`, but\n the same bytes represent 255 if read as an 8 bit unsigned integer:\n\n >>> us = s.view('uint8')\n >>> us\n 0 254\n 1 255\n 2 0\n 3 1\n 4 2\n dtype: uint8\n\n The views share the same underlying values:\n\n >>> us[0] = 128\n >>> s\n 0 -128\n 1 -1\n 2 0\n 3 1\n 4 2\n dtype: int8\n \"\"\"\n return self._constructor(\n self._values.view(dtype), index=self.index\n ).__finalize__(self)\n\n # ----------------------------------------------------------------------\n # NDArray Compat\n _HANDLED_TYPES = (Index, ExtensionArray, np.ndarray)\n\n def __array_ufunc__(\n self, ufunc: Callable, method: str, *inputs: Any, **kwargs: Any\n ):\n # TODO: handle DataFrame\n cls = type(self)\n\n # for binary ops, use our custom dunder methods\n result = ops.maybe_dispatch_ufunc_to_dunder_op(\n self, ufunc, method, *inputs, **kwargs\n )\n if result is not NotImplemented:\n return result\n\n # Determine if we should defer.\n no_defer = (np.ndarray.__array_ufunc__, cls.__array_ufunc__)\n\n for item in inputs:\n higher_priority = (\n hasattr(item, \"__array_priority__\")\n and item.__array_priority__ > self.__array_priority__\n )\n has_array_ufunc = (\n hasattr(item, \"__array_ufunc__\")\n and type(item).__array_ufunc__ not in no_defer\n and not isinstance(item, self._HANDLED_TYPES)\n )\n if higher_priority or has_array_ufunc:\n return NotImplemented\n\n # align all the inputs.\n names = [getattr(x, \"name\") for x in inputs if hasattr(x, \"name\")]\n types = tuple(type(x) for x in inputs)\n # TODO: dataframe\n alignable = [x for x, t in zip(inputs, types) if issubclass(t, Series)]\n\n if len(alignable) > 1:\n # This triggers alignment.\n # At the moment, there aren't any ufuncs with more than two inputs\n # so this ends up just being x1.index | x2.index, but we write\n # it to handle *args.\n index = alignable[0].index\n for s in alignable[1:]:\n index |= s.index\n inputs = tuple(\n x.reindex(index) if issubclass(t, Series) else x\n for x, t in zip(inputs, types)\n )\n else:\n index = self.index\n\n inputs = tuple(extract_array(x, extract_numpy=True) for x in inputs)\n result = getattr(ufunc, method)(*inputs, **kwargs)\n\n name = names[0] if len(set(names)) == 1 else None\n\n def construct_return(result):\n if lib.is_scalar(result):\n return result\n elif result.ndim > 1:\n # e.g. np.subtract.outer\n if method == \"outer\":\n # GH#27198\n raise NotImplementedError\n return result\n return self._constructor(result, index=index, name=name, copy=False)\n\n if type(result) is tuple:\n # multiple return values\n return tuple(construct_return(x) for x in result)\n elif method == \"at\":\n # no return value\n return None\n else:\n return construct_return(result)\n\n def __array__(self, dtype=None) -> np.ndarray:\n \"\"\"\n Return the values as a NumPy array.\n\n Users should not call this directly. Rather, it is invoked by\n :func:`numpy.array` and :func:`numpy.asarray`.\n\n Parameters\n ----------\n dtype : str or numpy.dtype, optional\n The dtype to use for the resulting NumPy array. 
By default,\n the dtype is inferred from the data.\n\n Returns\n -------\n numpy.ndarray\n The values in the series converted to a :class:`numpy.ndarray`\n with the specified `dtype`.\n\n See Also\n --------\n array : Create a new array from data.\n Series.array : Zero-copy view to the array backing the Series.\n Series.to_numpy : Series method for similar behavior.\n\n Examples\n --------\n >>> ser = pd.Series([1, 2, 3])\n >>> np.asarray(ser)\n array([1, 2, 3])\n\n For timezone-aware data, the timezones may be retained with\n ``dtype='object'``\n\n >>> tzser = pd.Series(pd.date_range('2000', periods=2, tz=\"CET\"))\n >>> np.asarray(tzser, dtype=\"object\")\n array([Timestamp('2000-01-01 00:00:00+0100', tz='CET', freq='D'),\n Timestamp('2000-01-02 00:00:00+0100', tz='CET', freq='D')],\n dtype=object)\n\n Or the values may be localized to UTC and the tzinfo discarded with\n ``dtype='datetime64[ns]'``\n\n >>> np.asarray(tzser, dtype=\"datetime64[ns]\") # doctest: +ELLIPSIS\n array(['1999-12-31T23:00:00.000000000', ...],\n dtype='datetime64[ns]')\n \"\"\"\n return np.asarray(self.array, dtype)\n\n # ----------------------------------------------------------------------\n # Unary Methods\n\n # coercion\n __float__ = _coerce_method(float)\n __long__ = _coerce_method(int)\n __int__ = _coerce_method(int)\n\n # ----------------------------------------------------------------------\n\n # indexers\n @property\n def axes(self) -> List[Index]:\n \"\"\"\n Return a list of the row axis labels.\n \"\"\"\n return [self.index]\n\n # ----------------------------------------------------------------------\n # Indexing Methods\n\n @Appender(generic.NDFrame.take.__doc__)\n def take(self, indices, axis=0, is_copy=None, **kwargs) -> \"Series\":\n if is_copy is not None:\n warnings.warn(\n \"is_copy is deprecated and will be removed in a future version. \"\n \"'take' always returns a copy, so there is no need to specify this.\",\n FutureWarning,\n stacklevel=2,\n )\n nv.validate_take(tuple(), kwargs)\n\n indices = ensure_platform_int(indices)\n new_index = self.index.take(indices)\n new_values = self._values.take(indices)\n\n return self._constructor(\n new_values, index=new_index, fastpath=True\n ).__finalize__(self)\n\n def _take_with_is_copy(self, indices, axis=0):\n \"\"\"\n Internal version of the `take` method that sets the `_is_copy`\n attribute to keep track of the parent dataframe (using in indexing\n for the SettingWithCopyWarning). 
For Series this does the same\n        as the public take (it never sets `_is_copy`).\n\n        See the docstring of `take` for full explanation of the parameters.\n        \"\"\"\n        return self.take(indices=indices, axis=axis)\n\n    def _ixs(self, i: int, axis: int = 0):\n        \"\"\"\n        Return the i-th value or values in the Series by location.\n\n        Parameters\n        ----------\n        i : int\n\n        Returns\n        -------\n        scalar (int) or Series (slice, sequence)\n        \"\"\"\n        return self._values[i]\n\n    def _slice(self, slobj: slice, axis: int = 0) -> \"Series\":\n        # axis kwarg is retained for compat with NDFrame method\n        # _slice is *always* positional\n        return self._get_values(slobj)\n\n    def __getitem__(self, key):\n        key = com.apply_if_callable(key, self)\n\n        if key is Ellipsis:\n            return self\n\n        key_is_scalar = is_scalar(key)\n        if isinstance(key, (list, tuple)):\n            key = unpack_1tuple(key)\n\n        if key_is_scalar or isinstance(self.index, MultiIndex):\n            # Otherwise index.get_value will raise InvalidIndexError\n            try:\n                result = self.index.get_value(self, key)\n\n                return result\n            except InvalidIndexError:\n                if not isinstance(self.index, MultiIndex):\n                    raise\n\n            except (KeyError, ValueError):\n                if isinstance(key, tuple) and isinstance(self.index, MultiIndex):\n                    # kludge\n                    pass\n                else:\n                    raise\n\n        if not key_is_scalar:\n            # avoid expensive checks if we know we have a scalar\n            if is_iterator(key):\n                key = list(key)\n\n            if com.is_bool_indexer(key):\n                key = check_bool_indexer(self.index, key)\n                key = np.asarray(key, dtype=bool)\n                return self._get_values(key)\n\n        return self._get_with(key)\n\n    def _get_with(self, key):\n        # other: fancy integer or otherwise\n        if isinstance(key, slice):\n            # _convert_slice_indexer to determine if this slice is positional\n            # or label based, and if the latter, convert to positional\n            slobj = self.index._convert_slice_indexer(key, kind=\"getitem\")\n            return self._slice(slobj)\n        elif isinstance(key, ABCDataFrame):\n            raise TypeError(\n                \"Indexing a Series with DataFrame is not \"\n                \"supported, use the appropriate DataFrame column\"\n            )\n        elif isinstance(key, tuple):\n            return self._get_values_tuple(key)\n\n        elif not is_list_like(key):\n            # e.g. scalars that aren't recognized by lib.is_scalar, GH#32684\n            return self.loc[key]\n\n        if not isinstance(key, (list, np.ndarray, ExtensionArray, Series, Index)):\n            key = list(key)\n\n        if isinstance(key, Index):\n            key_type = key.inferred_type\n        else:\n            key_type = lib.infer_dtype(key, skipna=False)\n\n        # Note: The key_type == \"boolean\" case should be caught by the\n        # com.is_bool_indexer check in __getitem__\n        if key_type == \"integer\":\n            # We need to decide whether to treat this as a positional indexer\n            # (i.e. self.iloc) or label-based (i.e. self.loc)\n            if self.index.is_integer() or self.index.is_floating():\n                return self.loc[key]\n            elif isinstance(self.index, IntervalIndex):\n                return self.loc[key]\n            else:\n                return self.iloc[key]\n\n        if isinstance(key, list):\n            # handle the dup indexing case GH#4246\n            return self.loc[key]\n\n        return self.reindex(key)\n\n    def _get_values_tuple(self, key):\n        # mpl hackaround\n        if com.any_none(*key):\n            # suppress warning from slicing the index with a 2d indexer.\n            # eventually we'll want Series itself to warn.\n            with warnings.catch_warnings():\n                warnings.filterwarnings(\n                    \"ignore\", \"Support for multi-dim\", DeprecationWarning\n                )\n                return self._get_values(key)\n\n        if not isinstance(self.index, MultiIndex):\n            raise ValueError(\"Can only tuple-index with a MultiIndex\")\n\n        # If key is contained, would have returned by now\n        indexer, new_index = self.index.get_loc_level(key)\n        return self._constructor(self._values[indexer], index=new_index).__finalize__(\n            self\n        )\n\n    def _get_values(self, indexer):\n        try:\n            return self._constructor(self._data.get_slice(indexer)).__finalize__(self)\n        except ValueError:\n            # mpl compat if we look up e.g. ser[:, np.newaxis];\n            # see tests.series.timeseries.test_mpl_compat_hack\n            return self._values[indexer]\n\n    def _get_value(self, label, takeable: bool = False):\n        \"\"\"\n        Quickly retrieve single value at passed index label.\n\n        Parameters\n        ----------\n        label : object\n        takeable : interpret the index as indexers, default False\n\n        Returns\n        -------\n        scalar value\n        \"\"\"\n        if takeable:\n            return self._values[label]\n\n        # Similar to Index.get_value, but we do not fall back to positional\n        loc = self.index.get_loc(label)\n        return self.index._get_values_for_loc(self, loc, label)\n\n    def __setitem__(self, key, value):\n        key = com.apply_if_callable(key, self)\n        cacher_needs_updating = self._check_is_chained_assignment_possible()\n\n        if key is Ellipsis:\n            key = slice(None)\n\n        try:\n            self._set_with_engine(key, value)\n        except (KeyError, ValueError):\n            values = self._values\n            if is_integer(key) and not self.index.inferred_type == \"integer\":\n                values[key] = value\n            else:\n                self.loc[key] = value\n\n        except TypeError as e:\n            if isinstance(key, tuple) and not isinstance(self.index, MultiIndex):\n                raise ValueError(\"Can only tuple-index with a MultiIndex\") from e\n\n            # python 3 type errors should be raised\n            if _is_unorderable_exception(e):\n                raise IndexError(key) from e\n\n            if com.is_bool_indexer(key):\n                key = check_bool_indexer(self.index, key)\n                key = np.asarray(key, dtype=bool)\n                try:\n                    self._where(~key, value, inplace=True)\n                    return\n                except InvalidIndexError:\n                    self._set_values(key.astype(np.bool_), value)\n\n            else:\n                self._set_with(key, value)\n\n        if cacher_needs_updating:\n            self._maybe_update_cacher()\n\n    def _set_with_engine(self, key, value):\n        # fails with AttributeError for IntervalIndex\n        loc = self.index._engine.get_loc(key)\n        validate_numeric_casting(self.dtype, value)\n        self._values[loc] = value\n\n    def _set_with(self, key, value):\n        # other: fancy integer or otherwise\n        if isinstance(key, slice):\n            indexer = self.index._convert_slice_indexer(key, kind=\"getitem\")\n            return self._set_values(indexer, value)\n\n        elif is_scalar(key) and not is_integer(key) and key not in self.index:\n            # GH#12862 adding a new key to the Series\n            # Note: have to exclude integers because that is ambiguously\n            # position-based\n            self.loc[key] = value\n            return\n\n        else:\n            if isinstance(key, tuple):\n                try:\n                    # TODO: no test cases that get here\n                    self._set_values(key, value)\n                
except Exception:\n pass\n\n if is_scalar(key):\n key = [key]\n\n if isinstance(key, Index):\n key_type = key.inferred_type\n key = key._values\n else:\n key_type = lib.infer_dtype(key, skipna=False)\n\n # Note: key_type == \"boolean\" should not occur because that\n # should be caught by the is_bool_indexer check in __setitem__\n if key_type == \"integer\":\n if self.index.inferred_type == \"integer\":\n self._set_labels(key, value)\n else:\n return self._set_values(key, value)\n else:\n self._set_labels(key, value)\n\n def _set_labels(self, key, value):\n key = com.asarray_tuplesafe(key)\n indexer: np.ndarray = self.index.get_indexer(key)\n mask = indexer == -1\n if mask.any():\n raise ValueError(f\"{key[mask]} not contained in the index\")\n self._set_values(indexer, value)\n\n def _set_values(self, key, value):\n if isinstance(key, Series):\n key = key._values\n self._data = self._data.setitem(indexer=key, value=value)\n self._maybe_update_cacher()\n\n def _set_value(self, label, value, takeable: bool = False):\n \"\"\"\n Quickly set single value at passed label.\n\n If label is not contained, a new object is created with the label\n placed at the end of the result index.\n\n Parameters\n ----------\n label : object\n Partial indexing with MultiIndex not allowed.\n value : object\n Scalar value.\n takeable : interpret the index as indexers, default False\n \"\"\"\n try:\n if takeable:\n self._values[label] = value\n else:\n loc = self.index.get_loc(label)\n validate_numeric_casting(self.dtype, value)\n self._values[loc] = value\n except KeyError:\n\n # set using a non-recursive method\n self.loc[label] = value\n\n # ----------------------------------------------------------------------\n # Unsorted\n\n @property\n def _is_mixed_type(self):\n return False\n\n def repeat(self, repeats, axis=None) -> \"Series\":\n \"\"\"\n Repeat elements of a Series.\n\n Returns a new Series where each element of the current Series\n is repeated consecutively a given number of times.\n\n Parameters\n ----------\n repeats : int or array of ints\n The number of repetitions for each element. This should be a\n non-negative integer. Repeating 0 times will return an empty\n Series.\n axis : None\n Must be ``None``. Has no effect but is accepted for compatibility\n with numpy.\n\n Returns\n -------\n Series\n Newly created Series with repeated elements.\n\n See Also\n --------\n Index.repeat : Equivalent function for Index.\n numpy.repeat : Similar method for :class:`numpy.ndarray`.\n\n Examples\n --------\n >>> s = pd.Series(['a', 'b', 'c'])\n >>> s\n 0 a\n 1 b\n 2 c\n dtype: object\n >>> s.repeat(2)\n 0 a\n 0 a\n 1 b\n 1 b\n 2 c\n 2 c\n dtype: object\n >>> s.repeat([1, 2, 3])\n 0 a\n 1 b\n 1 b\n 2 c\n 2 c\n 2 c\n dtype: object\n \"\"\"\n nv.validate_repeat(tuple(), dict(axis=axis))\n new_index = self.index.repeat(repeats)\n new_values = self._values.repeat(repeats)\n return self._constructor(new_values, index=new_index).__finalize__(self)\n\n def reset_index(self, level=None, drop=False, name=None, inplace=False):\n \"\"\"\n Generate a new DataFrame or Series with the index reset.\n\n This is useful when the index needs to be treated as a column, or\n when the index is meaningless and needs to be reset to the default\n before another operation.\n\n Parameters\n ----------\n level : int, str, tuple, or list, default optional\n For a Series with a MultiIndex, only remove the specified levels\n from the index. 
Removes all levels by default.\n drop : bool, default False\n Just reset the index, without inserting it as a column in\n the new DataFrame.\n name : object, optional\n The name to use for the column containing the original Series\n values. Uses ``self.name`` by default. This argument is ignored\n when `drop` is True.\n inplace : bool, default False\n Modify the Series in place (do not create a new object).\n\n Returns\n -------\n Series or DataFrame\n When `drop` is False (the default), a DataFrame is returned.\n The newly created columns will come first in the DataFrame,\n followed by the original Series values.\n When `drop` is True, a `Series` is returned.\n In either case, if ``inplace=True``, no value is returned.\n\n See Also\n --------\n DataFrame.reset_index: Analogous function for DataFrame.\n\n Examples\n --------\n >>> s = pd.Series([1, 2, 3, 4], name='foo',\n ... index=pd.Index(['a', 'b', 'c', 'd'], name='idx'))\n\n Generate a DataFrame with default index.\n\n >>> s.reset_index()\n idx foo\n 0 a 1\n 1 b 2\n 2 c 3\n 3 d 4\n\n To specify the name of the new column use `name`.\n\n >>> s.reset_index(name='values')\n idx values\n 0 a 1\n 1 b 2\n 2 c 3\n 3 d 4\n\n To generate a new Series with the default set `drop` to True.\n\n >>> s.reset_index(drop=True)\n 0 1\n 1 2\n 2 3\n 3 4\n Name: foo, dtype: int64\n\n To update the Series in place, without generating a new one\n set `inplace` to True. Note that it also requires ``drop=True``.\n\n >>> s.reset_index(inplace=True, drop=True)\n >>> s\n 0 1\n 1 2\n 2 3\n 3 4\n Name: foo, dtype: int64\n\n The `level` parameter is interesting for Series with a multi-level\n index.\n\n >>> arrays = [np.array(['bar', 'bar', 'baz', 'baz']),\n ... np.array(['one', 'two', 'one', 'two'])]\n >>> s2 = pd.Series(\n ... range(4), name='foo',\n ... index=pd.MultiIndex.from_arrays(arrays,\n ... 
names=['a', 'b']))\n\n To remove a specific level from the Index, use `level`.\n\n >>> s2.reset_index(level='a')\n a foo\n b\n one bar 0\n two bar 1\n one baz 2\n two baz 3\n\n If `level` is not set, all levels are removed from the Index.\n\n >>> s2.reset_index()\n a b foo\n 0 bar one 0\n 1 bar two 1\n 2 baz one 2\n 3 baz two 3\n \"\"\"\n inplace = validate_bool_kwarg(inplace, \"inplace\")\n if drop:\n new_index = ibase.default_index(len(self))\n if level is not None:\n if not isinstance(level, (tuple, list)):\n level = [level]\n level = [self.index._get_level_number(lev) for lev in level]\n if len(level) < self.index.nlevels:\n new_index = self.index.droplevel(level)\n\n if inplace:\n self.index = new_index\n # set name if it was passed, otherwise, keep the previous name\n self.name = name or self.name\n else:\n return self._constructor(\n self._values.copy(), index=new_index\n ).__finalize__(self)\n elif inplace:\n raise TypeError(\n \"Cannot reset_index inplace on a Series to create a DataFrame\"\n )\n else:\n df = self.to_frame(name)\n return df.reset_index(level=level, drop=drop)\n\n # ----------------------------------------------------------------------\n # Rendering Methods\n\n def __repr__(self) -> str:\n \"\"\"\n Return a string representation for a particular Series.\n \"\"\"\n buf = StringIO(\"\")\n width, height = get_terminal_size()\n max_rows = (\n height\n if get_option(\"display.max_rows\") == 0\n else get_option(\"display.max_rows\")\n )\n min_rows = (\n height\n if get_option(\"display.max_rows\") == 0\n else get_option(\"display.min_rows\")\n )\n show_dimensions = get_option(\"display.show_dimensions\")\n\n self.to_string(\n buf=buf,\n name=self.name,\n dtype=self.dtype,\n min_rows=min_rows,\n max_rows=max_rows,\n length=show_dimensions,\n )\n result = buf.getvalue()\n\n return result\n\n def to_string(\n self,\n buf=None,\n na_rep=\"NaN\",\n float_format=None,\n header=True,\n index=True,\n length=False,\n dtype=False,\n name=False,\n max_rows=None,\n min_rows=None,\n ):\n \"\"\"\n Render a string representation of the Series.\n\n Parameters\n ----------\n buf : StringIO-like, optional\n Buffer to write to.\n na_rep : str, optional\n String representation of NaN to use, default 'NaN'.\n float_format : one-parameter function, optional\n Formatter function to apply to columns' elements if they are\n floats, default None.\n header : bool, default True\n Add the Series header (index name).\n index : bool, optional\n Add index (row) labels, default True.\n length : bool, default False\n Add the Series length.\n dtype : bool, default False\n Add the Series dtype.\n name : bool, default False\n Add the Series name if not None.\n max_rows : int, optional\n Maximum number of rows to show before truncating. 
If None, show\n all.\n min_rows : int, optional\n The number of rows to display in a truncated repr (when number\n of rows is above `max_rows`).\n\n Returns\n -------\n str or None\n String representation of Series if ``buf=None``, otherwise None.\n \"\"\"\n formatter = fmt.SeriesFormatter(\n self,\n name=name,\n length=length,\n header=header,\n index=index,\n dtype=dtype,\n na_rep=na_rep,\n float_format=float_format,\n min_rows=min_rows,\n max_rows=max_rows,\n )\n result = formatter.to_string()\n\n # catch contract violations\n if not isinstance(result, str):\n raise AssertionError(\n \"result must be of type str, type \"\n f\"of result is {repr(type(result).__name__)}\"\n )\n\n if buf is None:\n return result\n else:\n try:\n buf.write(result)\n except AttributeError:\n with open(buf, \"w\") as f:\n f.write(result)\n\n @Appender(\n \"\"\"\n Examples\n --------\n >>> s = pd.Series([\"elk\", \"pig\", \"dog\", \"quetzal\"], name=\"animal\")\n >>> print(s.to_markdown())\n | | animal |\n |---:|:---------|\n | 0 | elk |\n | 1 | pig |\n | 2 | dog |\n | 3 | quetzal |\n \"\"\"\n )\n @Substitution(klass=\"Series\")\n @Appender(generic._shared_docs[\"to_markdown\"])\n def to_markdown(\n self, buf: Optional[IO[str]] = None, mode: Optional[str] = None, **kwargs\n ) -> Optional[str]:\n return self.to_frame().to_markdown(buf, mode, **kwargs)\n\n # ----------------------------------------------------------------------\n\n def items(self) -> Iterable[Tuple[Label, Any]]:\n \"\"\"\n Lazily iterate over (index, value) tuples.\n\n This method returns an iterable tuple (index, value). This is\n convenient if you want to create a lazy iterator.\n\n Returns\n -------\n iterable\n Iterable of tuples containing the (index, value) pairs from a\n Series.\n\n See Also\n --------\n DataFrame.items : Iterate over (column name, Series) pairs.\n DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs.\n\n Examples\n --------\n >>> s = pd.Series(['A', 'B', 'C'])\n >>> for index, value in s.items():\n ... print(f\"Index : {index}, Value : {value}\")\n Index : 0, Value : A\n Index : 1, Value : B\n Index : 2, Value : C\n \"\"\"\n return zip(iter(self.index), iter(self))\n\n @Appender(items.__doc__)\n def iteritems(self) -> Iterable[Tuple[Label, Any]]:\n return self.items()\n\n # ----------------------------------------------------------------------\n # Misc public methods\n\n def keys(self) -> Index:\n \"\"\"\n Return alias for index.\n\n Returns\n -------\n Index\n Index of the Series.\n \"\"\"\n return self.index\n\n def to_dict(self, into=dict):\n \"\"\"\n Convert Series to {label -> value} dict or dict-like object.\n\n Parameters\n ----------\n into : class, default dict\n The collections.abc.Mapping subclass to use as the return\n object. Can be the actual class or an empty\n instance of the mapping type you want. If you want a\n collections.defaultdict, you must pass it initialized.\n\n .. 
versionadded:: 0.21.0\n\n Returns\n -------\n collections.abc.Mapping\n Key-value representation of Series.\n\n Examples\n --------\n >>> s = pd.Series([1, 2, 3, 4])\n >>> s.to_dict()\n {0: 1, 1: 2, 2: 3, 3: 4}\n >>> from collections import OrderedDict, defaultdict\n >>> s.to_dict(OrderedDict)\n OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])\n >>> dd = defaultdict(list)\n >>> s.to_dict(dd)\n defaultdict(<class 'list'>, {0: 1, 1: 2, 2: 3, 3: 4})\n \"\"\"\n # GH16122\n into_c = com.standardize_mapping(into)\n return into_c(self.items())\n\n def to_frame(self, name=None) -> \"DataFrame\":\n \"\"\"\n Convert Series to DataFrame.\n\n Parameters\n ----------\n name : object, default None\n The passed name should substitute for the series name (if it has\n one).\n\n Returns\n -------\n DataFrame\n DataFrame representation of Series.\n\n Examples\n --------\n >>> s = pd.Series([\"a\", \"b\", \"c\"],\n ... name=\"vals\")\n >>> s.to_frame()\n vals\n 0 a\n 1 b\n 2 c\n \"\"\"\n if name is None:\n df = self._constructor_expanddim(self)\n else:\n df = self._constructor_expanddim({name: self})\n\n return df\n\n def _set_name(self, name, inplace=False) -> \"Series\":\n \"\"\"\n Set the Series name.\n\n Parameters\n ----------\n name : str\n inplace : bool\n Whether to modify `self` directly or return a copy.\n \"\"\"\n inplace = validate_bool_kwarg(inplace, \"inplace\")\n ser = self if inplace else self.copy()\n ser.name = name\n return ser\n\n @Appender(\n \"\"\"\nExamples\n--------\n>>> ser = pd.Series([390., 350., 30., 20.],\n... index=['Falcon', 'Falcon', 'Parrot', 'Parrot'], name=\"Max Speed\")\n>>> ser\nFalcon 390.0\nFalcon 350.0\nParrot 30.0\nParrot 20.0\nName: Max Speed, dtype: float64\n>>> ser.groupby([\"a\", \"b\", \"a\", \"b\"]).mean()\na 210.0\nb 185.0\nName: Max Speed, dtype: float64\n>>> ser.groupby(level=0).mean()\nFalcon 370.0\nParrot 25.0\nName: Max Speed, dtype: float64\n>>> ser.groupby(ser > 100).mean()\nMax Speed\nFalse 25.0\nTrue 370.0\nName: Max Speed, dtype: float64\n\n**Grouping by Indexes**\n\nWe can groupby different levels of a hierarchical index\nusing the `level` parameter:\n\n>>> arrays = [['Falcon', 'Falcon', 'Parrot', 'Parrot'],\n... 
['Captive', 'Wild', 'Captive', 'Wild']]\n>>> index = pd.MultiIndex.from_arrays(arrays, names=('Animal', 'Type'))\n>>> ser = pd.Series([390., 350., 30., 20.], index=index, name=\"Max Speed\")\n>>> ser\nAnimal Type\nFalcon Captive 390.0\n Wild 350.0\nParrot Captive 30.0\n Wild 20.0\nName: Max Speed, dtype: float64\n>>> ser.groupby(level=0).mean()\nAnimal\nFalcon 370.0\nParrot 25.0\nName: Max Speed, dtype: float64\n>>> ser.groupby(level=\"Type\").mean()\nType\nCaptive 210.0\nWild 185.0\nName: Max Speed, dtype: float64\n\"\"\"\n )\n @Appender(generic._shared_docs[\"groupby\"] % _shared_doc_kwargs)\n def groupby(\n self,\n by=None,\n axis=0,\n level=None,\n as_index: bool = True,\n sort: bool = True,\n group_keys: bool = True,\n squeeze: bool = False,\n observed: bool = False,\n ) -> \"SeriesGroupBy\":\n from pandas.core.groupby.generic import SeriesGroupBy\n\n if level is None and by is None:\n raise TypeError(\"You have to supply one of 'by' and 'level'\")\n axis = self._get_axis_number(axis)\n\n return SeriesGroupBy(\n obj=self,\n keys=by,\n axis=axis,\n level=level,\n as_index=as_index,\n sort=sort,\n group_keys=group_keys,\n squeeze=squeeze,\n observed=observed,\n )\n\n # ----------------------------------------------------------------------\n # Statistics, overridden ndarray methods\n\n # TODO: integrate bottleneck\n\n def count(self, level=None):\n \"\"\"\n Return number of non-NA/null observations in the Series.\n\n Parameters\n ----------\n level : int or level name, default None\n If the axis is a MultiIndex (hierarchical), count along a\n particular level, collapsing into a smaller Series.\n\n Returns\n -------\n int or Series (if level specified)\n Number of non-null values in the Series.\n\n See Also\n --------\n DataFrame.count : Count non-NA cells for each column or row.\n\n Examples\n --------\n >>> s = pd.Series([0.0, 1.0, np.nan])\n >>> s.count()\n 2\n \"\"\"\n if level is None:\n return notna(self.array).sum()\n\n if isinstance(level, str):\n level = self.index._get_level_number(level)\n\n lev = self.index.levels[level]\n level_codes = np.array(self.index.codes[level], subok=False, copy=True)\n\n mask = level_codes == -1\n if mask.any():\n level_codes[mask] = cnt = len(lev)\n lev = lev.insert(cnt, lev._na_value)\n\n obs = level_codes[notna(self._values)]\n out = np.bincount(obs, minlength=len(lev) or None)\n return self._constructor(out, index=lev, dtype=\"int64\").__finalize__(self)\n\n def mode(self, dropna=True) -> \"Series\":\n \"\"\"\n Return the mode(s) of the dataset.\n\n Always returns Series even if only one value is returned.\n\n Parameters\n ----------\n dropna : bool, default True\n Don't consider counts of NaN/NaT.\n\n .. versionadded:: 0.24.0\n\n Returns\n -------\n Series\n Modes of the Series in sorted order.\n \"\"\"\n # TODO: Add option for bins like value_counts()\n return algorithms.mode(self, dropna=dropna)\n\n def unique(self):\n \"\"\"\n Return unique values of Series object.\n\n Uniques are returned in order of appearance. Hash table-based unique,\n therefore does NOT sort.\n\n Returns\n -------\n ndarray or ExtensionArray\n The unique values returned as a NumPy array. See Notes.\n\n See Also\n --------\n unique : Top-level unique method for any 1-d array-like object.\n Index.unique : Return Index with unique values from an Index object.\n\n Notes\n -----\n Returns the unique values as a NumPy array. In case of an\n extension-array backed Series, a new\n :class:`~api.extensions.ExtensionArray` of that type with just\n the unique values is returned. 
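# --- Sketch of the level-aware count path above (illustrative only): counting
# non-NA values along one MultiIndex level collapses the Series into a smaller
# one. The sample data is an assumption for demonstration.
import numpy as np
import pandas as pd

idx = pd.MultiIndex.from_arrays(
    [["x", "x", "y", "y"], [0, 1, 0, 1]], names=["key", "pos"]
)
s = pd.Series([1.0, np.nan, 3.0, 4.0], index=idx)
per_key = s.count(level="key")  # the NaN under 'x' is excluded from the tally
assert per_key["x"] == 1 and per_key["y"] == 2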
This includes\n\n * Categorical\n * Period\n * Datetime with Timezone\n * Interval\n * Sparse\n * IntegerNA\n\n See Examples section.\n\n Examples\n --------\n >>> pd.Series([2, 1, 3, 3], name='A').unique()\n array([2, 1, 3])\n\n >>> pd.Series([pd.Timestamp('2016-01-01') for _ in range(3)]).unique()\n array(['2016-01-01T00:00:00.000000000'], dtype='datetime64[ns]')\n\n >>> pd.Series([pd.Timestamp('2016-01-01', tz='US/Eastern')\n ... for _ in range(3)]).unique()\n <DatetimeArray>\n ['2016-01-01 00:00:00-05:00']\n Length: 1, dtype: datetime64[ns, US/Eastern]\n\n An unordered Categorical will return categories in the order of\n appearance.\n\n >>> pd.Series(pd.Categorical(list('baabc'))).unique()\n [b, a, c]\n Categories (3, object): [b, a, c]\n\n An ordered Categorical preserves the category ordering.\n\n >>> pd.Series(pd.Categorical(list('baabc'), categories=list('abc'),\n ... ordered=True)).unique()\n [b, a, c]\n Categories (3, object): [a < b < c]\n \"\"\"\n result = super().unique()\n return result\n\n def drop_duplicates(self, keep=\"first\", inplace=False) -> Optional[\"Series\"]:\n \"\"\"\n Return Series with duplicate values removed.\n\n Parameters\n ----------\n keep : {'first', 'last', ``False``}, default 'first'\n Method to handle dropping duplicates:\n\n - 'first' : Drop duplicates except for the first occurrence.\n - 'last' : Drop duplicates except for the last occurrence.\n - ``False`` : Drop all duplicates.\n\n inplace : bool, default ``False``\n If ``True``, performs operation inplace and returns None.\n\n Returns\n -------\n Series\n Series with duplicates dropped.\n\n See Also\n --------\n Index.drop_duplicates : Equivalent method on Index.\n DataFrame.drop_duplicates : Equivalent method on DataFrame.\n Series.duplicated : Related method on Series, indicating duplicate\n Series values.\n\n Examples\n --------\n Generate a Series with duplicated entries.\n\n >>> s = pd.Series(['lama', 'cow', 'lama', 'beetle', 'lama', 'hippo'],\n ... name='animal')\n >>> s\n 0 lama\n 1 cow\n 2 lama\n 3 beetle\n 4 lama\n 5 hippo\n Name: animal, dtype: object\n\n With the 'keep' parameter, the selection behaviour of duplicated values\n can be changed. The value 'first' keeps the first occurrence for each\n set of duplicated entries. The default value of keep is 'first'.\n\n >>> s.drop_duplicates()\n 0 lama\n 1 cow\n 3 beetle\n 5 hippo\n Name: animal, dtype: object\n\n The value 'last' for parameter 'keep' keeps the last occurrence for\n each set of duplicated entries.\n\n >>> s.drop_duplicates(keep='last')\n 1 cow\n 3 beetle\n 4 lama\n 5 hippo\n Name: animal, dtype: object\n\n The value ``False`` for parameter 'keep' discards all sets of\n duplicated entries. Setting the value of 'inplace' to ``True`` performs\n the operation inplace and returns ``None``.\n\n >>> s.drop_duplicates(keep=False, inplace=True)\n >>> s\n 1 cow\n 3 beetle\n 5 hippo\n Name: animal, dtype: object\n \"\"\"\n inplace = validate_bool_kwarg(inplace, \"inplace\")\n result = super().drop_duplicates(keep=keep)\n if inplace:\n self._update_inplace(result)\n return None\n else:\n return result\n\n def duplicated(self, keep=\"first\") -> \"Series\":\n \"\"\"\n Indicate duplicate Series values.\n\n Duplicated values are indicated as ``True`` values in the resulting\n Series. 
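# --- Illustrative consistency check (not library code): drop_duplicates(keep=k)
# retains exactly the rows that duplicated(keep=k) marks False, for every
# supported value of `keep`.
import pandas as pd

s = pd.Series(["lama", "cow", "lama", "beetle", "lama"])
for keep in ("first", "last", False):
    kept = s.drop_duplicates(keep=keep)
    assert kept.equals(s[~s.duplicated(keep=keep)])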
Either all duplicates, all except the first or all except the\n last occurrence of duplicates can be indicated.\n\n Parameters\n ----------\n keep : {'first', 'last', False}, default 'first'\n Method to handle dropping duplicates:\n\n - 'first' : Mark duplicates as ``True`` except for the first\n occurrence.\n - 'last' : Mark duplicates as ``True`` except for the last\n occurrence.\n - ``False`` : Mark all duplicates as ``True``.\n\n Returns\n -------\n Series\n Series indicating whether each value has occurred in the\n preceding values.\n\n See Also\n --------\n Index.duplicated : Equivalent method on pandas.Index.\n DataFrame.duplicated : Equivalent method on pandas.DataFrame.\n Series.drop_duplicates : Remove duplicate values from Series.\n\n Examples\n --------\n By default, for each set of duplicated values, the first occurrence is\n set on False and all others on True:\n\n >>> animals = pd.Series(['lama', 'cow', 'lama', 'beetle', 'lama'])\n >>> animals.duplicated()\n 0 False\n 1 False\n 2 True\n 3 False\n 4 True\n dtype: bool\n\n which is equivalent to\n\n >>> animals.duplicated(keep='first')\n 0 False\n 1 False\n 2 True\n 3 False\n 4 True\n dtype: bool\n\n By using 'last', the last occurrence of each set of duplicated values\n is set on False and all others on True:\n\n >>> animals.duplicated(keep='last')\n 0 True\n 1 False\n 2 True\n 3 False\n 4 False\n dtype: bool\n\n By setting keep on ``False``, all duplicates are True:\n\n >>> animals.duplicated(keep=False)\n 0 True\n 1 False\n 2 True\n 3 False\n 4 True\n dtype: bool\n \"\"\"\n return super().duplicated(keep=keep)\n\n def idxmin(self, axis=0, skipna=True, *args, **kwargs):\n \"\"\"\n Return the row label of the minimum value.\n\n If multiple values equal the minimum, the first row label with that\n value is returned.\n\n Parameters\n ----------\n axis : int, default 0\n For compatibility with DataFrame.idxmin. Redundant for application\n on Series.\n skipna : bool, default True\n Exclude NA/null values. If the entire Series is NA, the result\n will be NA.\n *args, **kwargs\n Additional arguments and keywords have no effect but might be\n accepted for compatibility with NumPy.\n\n Returns\n -------\n Index\n Label of the minimum value.\n\n Raises\n ------\n ValueError\n If the Series is empty.\n\n See Also\n --------\n numpy.argmin : Return indices of the minimum values\n along the given axis.\n DataFrame.idxmin : Return index of first occurrence of minimum\n over requested axis.\n Series.idxmax : Return index *label* of the first occurrence\n of maximum of values.\n\n Notes\n -----\n This method is the Series version of ``ndarray.argmin``. This method\n returns the label of the minimum, while ``ndarray.argmin`` returns\n the position. To get the position, use ``series.values.argmin()``.\n\n Examples\n --------\n >>> s = pd.Series(data=[1, None, 4, 1],\n ... 
index=['A', 'B', 'C', 'D'])\n >>> s\n A 1.0\n B NaN\n C 4.0\n D 1.0\n dtype: float64\n\n >>> s.idxmin()\n 'A'\n\n If `skipna` is False and there is an NA value in the data,\n the function returns ``nan``.\n\n >>> s.idxmin(skipna=False)\n nan\n \"\"\"\n skipna = nv.validate_argmin_with_skipna(skipna, args, kwargs)\n i = nanops.nanargmin(self._values, skipna=skipna)\n if i == -1:\n return np.nan\n return self.index[i]\n\n def idxmax(self, axis=0, skipna=True, *args, **kwargs):\n \"\"\"\n Return the row label of the maximum value.\n\n If multiple values equal the maximum, the first row label with that\n value is returned.\n\n Parameters\n ----------\n axis : int, default 0\n For compatibility with DataFrame.idxmax. Redundant for application\n on Series.\n skipna : bool, default True\n Exclude NA/null values. If the entire Series is NA, the result\n will be NA.\n *args, **kwargs\n Additional arguments and keywords have no effect but might be\n accepted for compatibility with NumPy.\n\n Returns\n -------\n Index\n Label of the maximum value.\n\n Raises\n ------\n ValueError\n If the Series is empty.\n\n See Also\n --------\n numpy.argmax : Return indices of the maximum values\n along the given axis.\n DataFrame.idxmax : Return index of first occurrence of maximum\n over requested axis.\n Series.idxmin : Return index *label* of the first occurrence\n of minimum of values.\n\n Notes\n -----\n This method is the Series version of ``ndarray.argmax``. This method\n returns the label of the maximum, while ``ndarray.argmax`` returns\n the position. To get the position, use ``series.values.argmax()``.\n\n Examples\n --------\n >>> s = pd.Series(data=[1, None, 4, 3, 4],\n ... index=['A', 'B', 'C', 'D', 'E'])\n >>> s\n A 1.0\n B NaN\n C 4.0\n D 3.0\n E 4.0\n dtype: float64\n\n >>> s.idxmax()\n 'C'\n\n If `skipna` is False and there is an NA value in the data,\n the function returns ``nan``.\n\n >>> s.idxmax(skipna=False)\n nan\n \"\"\"\n skipna = nv.validate_argmax_with_skipna(skipna, args, kwargs)\n i = nanops.nanargmax(self._values, skipna=skipna)\n if i == -1:\n return np.nan\n return self.index[i]\n\n def round(self, decimals=0, *args, **kwargs) -> \"Series\":\n \"\"\"\n Round each value in a Series to the given number of decimals.\n\n Parameters\n ----------\n decimals : int, default 0\n Number of decimal places to round to. 
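# --- Illustrative sketch: idxmin/idxmax return index *labels*, unlike the
# positional numpy argmin/argmax mentioned in the Notes above. The sample
# data is an assumption for demonstration.
import numpy as np
import pandas as pd

s = pd.Series([1.0, np.nan, 4.0, 1.0], index=["A", "B", "C", "D"])
assert s.idxmin() == "A"                 # first label holding the minimum
assert s.idxmax() == "C"
assert np.isnan(s.idxmin(skipna=False))  # a single NA poisons the reduction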
If decimals is negative,\n it specifies the number of positions to the left of the decimal point.\n *args, **kwargs\n Additional arguments and keywords have no effect but might be\n accepted for compatibility with NumPy.\n\n Returns\n -------\n Series\n Rounded values of the Series.\n\n See Also\n --------\n numpy.around : Round values of an np.array.\n DataFrame.round : Round values of a DataFrame.\n\n Examples\n --------\n >>> s = pd.Series([0.1, 1.3, 2.7])\n >>> s.round()\n 0 0.0\n 1 1.0\n 2 3.0\n dtype: float64\n \"\"\"\n nv.validate_round(args, kwargs)\n result = self._values.round(decimals)\n result = self._constructor(result, index=self.index).__finalize__(self)\n\n return result\n\n def quantile(self, q=0.5, interpolation=\"linear\"):\n \"\"\"\n Return value at the given quantile.\n\n Parameters\n ----------\n q : float or array-like, default 0.5 (50% quantile)\n The quantile(s) to compute, which can lie in range: 0 <= q <= 1.\n interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'}\n This optional parameter specifies the interpolation method to use,\n when the desired quantile lies between two data points `i` and `j`:\n\n * linear: `i + (j - i) * fraction`, where `fraction` is the\n fractional part of the index surrounded by `i` and `j`.\n * lower: `i`.\n * higher: `j`.\n * nearest: `i` or `j` whichever is nearest.\n * midpoint: (`i` + `j`) / 2.\n\n Returns\n -------\n float or Series\n If ``q`` is an array, a Series will be returned where the\n index is ``q`` and the values are the quantiles, otherwise\n a float will be returned.\n\n See Also\n --------\n core.window.Rolling.quantile : Calculate the rolling quantile.\n numpy.percentile : Returns the q-th percentile(s) of the array elements.\n\n Examples\n --------\n >>> s = pd.Series([1, 2, 3, 4])\n >>> s.quantile(.5)\n 2.5\n >>> s.quantile([.25, .5, .75])\n 0.25 1.75\n 0.50 2.50\n 0.75 3.25\n dtype: float64\n \"\"\"\n validate_percentile(q)\n\n # We dispatch to DataFrame so that core.internals only has to worry\n # about 2D cases.\n df = self.to_frame()\n\n result = df.quantile(q=q, interpolation=interpolation, numeric_only=False)\n if result.ndim == 2:\n result = result.iloc[:, 0]\n\n if is_list_like(q):\n result.name = self.name\n return self._constructor(result, index=Float64Index(q), name=self.name)\n else:\n # scalar\n return result.iloc[0]\n\n def corr(self, other, method=\"pearson\", min_periods=None) -> float:\n \"\"\"\n Compute correlation with `other` Series, excluding missing values.\n\n Parameters\n ----------\n other : Series\n Series with which to compute the correlation.\n method : {'pearson', 'kendall', 'spearman'} or callable\n Method used to compute correlation:\n\n - pearson : Standard correlation coefficient\n - kendall : Kendall Tau correlation coefficient\n - spearman : Spearman rank correlation\n - callable: Callable with input two 1d ndarrays and returning a float.\n\n .. versionadded:: 0.24.0\n Note that the returned matrix from corr will have 1 along the\n diagonals and will be symmetric regardless of the callable's\n behavior.\n min_periods : int, optional\n Minimum number of observations needed to have a valid result.\n\n Returns\n -------\n float\n Correlation with other.\n\n See Also\n --------\n DataFrame.corr : Compute pairwise correlation between columns.\n DataFrame.corrwith : Compute pairwise correlation with another\n DataFrame or Series.\n\n Examples\n --------\n >>> def histogram_intersection(a, b):\n ... v = np.minimum(a, b).sum().round(decimals=1)\n ... 
return v\n >>> s1 = pd.Series([.2, .0, .6, .2])\n >>> s2 = pd.Series([.3, .6, .0, .1])\n >>> s1.corr(s2, method=histogram_intersection)\n 0.3\n \"\"\"\n this, other = self.align(other, join=\"inner\", copy=False)\n if len(this) == 0:\n return np.nan\n\n if method in [\"pearson\", \"spearman\", \"kendall\"] or callable(method):\n return nanops.nancorr(\n this.values, other.values, method=method, min_periods=min_periods\n )\n\n raise ValueError(\n \"method must be either 'pearson', \"\n \"'spearman', 'kendall', or a callable, \"\n f\"'{method}' was supplied\"\n )\n\n def cov(self, other, min_periods=None) -> float:\n \"\"\"\n Compute covariance with Series, excluding missing values.\n\n Parameters\n ----------\n other : Series\n Series with which to compute the covariance.\n min_periods : int, optional\n Minimum number of observations needed to have a valid result.\n\n Returns\n -------\n float\n Covariance between Series and other normalized by N-1\n (unbiased estimator).\n\n See Also\n --------\n DataFrame.cov : Compute pairwise covariance of columns.\n\n Examples\n --------\n >>> s1 = pd.Series([0.90010907, 0.13484424, 0.62036035])\n >>> s2 = pd.Series([0.12528585, 0.26962463, 0.51111198])\n >>> s1.cov(s2)\n -0.01685762652715874\n \"\"\"\n this, other = self.align(other, join=\"inner\", copy=False)\n if len(this) == 0:\n return np.nan\n return nanops.nancov(this.values, other.values, min_periods=min_periods)\n\n def diff(self, periods: int = 1) -> \"Series\":\n \"\"\"\n First discrete difference of element.\n\n Calculates the difference of a Series element compared with another\n element in the Series (default is element in previous row).\n\n Parameters\n ----------\n periods : int, default 1\n Periods to shift for calculating difference, accepts negative\n values.\n\n Returns\n -------\n Series\n First differences of the Series.\n\n See Also\n --------\n Series.pct_change: Percent change over given number of periods.\n Series.shift: Shift index by desired number of periods with an\n optional time freq.\n DataFrame.diff: First discrete difference of object.\n\n Notes\n -----\n For boolean dtypes, this uses :meth:`operator.xor` rather than\n :meth:`operator.sub`.\n\n Examples\n --------\n Difference with previous row\n\n >>> s = pd.Series([1, 1, 2, 3, 5, 8])\n >>> s.diff()\n 0 NaN\n 1 0.0\n 2 1.0\n 3 1.0\n 4 2.0\n 5 3.0\n dtype: float64\n\n Difference with 3rd previous row\n\n >>> s.diff(periods=3)\n 0 NaN\n 1 NaN\n 2 NaN\n 3 2.0\n 4 4.0\n 5 6.0\n dtype: float64\n\n Difference with following row\n\n >>> s.diff(periods=-1)\n 0 0.0\n 1 -1.0\n 2 -1.0\n 3 -2.0\n 4 -3.0\n 5 NaN\n dtype: float64\n \"\"\"\n result = algorithms.diff(self.array, periods)\n return self._constructor(result, index=self.index).__finalize__(self)\n\n def autocorr(self, lag=1) -> float:\n \"\"\"\n Compute the lag-N autocorrelation.\n\n This method computes the Pearson correlation between\n the Series and its shifted self.\n\n Parameters\n ----------\n lag : int, default 1\n Number of lags to apply before performing autocorrelation.\n\n Returns\n -------\n float\n The Pearson correlation between self and self.shift(lag).\n\n See Also\n --------\n Series.corr : Compute the correlation between two Series.\n Series.shift : Shift index by desired number of periods.\n DataFrame.corr : Compute pairwise correlation of columns.\n DataFrame.corrwith : Compute pairwise correlation between rows or\n columns of two DataFrame objects.\n\n Notes\n -----\n If the Pearson correlation is not well defined return 'NaN'.\n\n Examples\n 
--------\n        >>> s = pd.Series([0.25, 0.5, 0.2, -0.05])\n        >>> s.autocorr()  # doctest: +ELLIPSIS\n        0.10355...\n        >>> s.autocorr(lag=2)  # doctest: +ELLIPSIS\n        -0.99999...\n\n        If the Pearson correlation is not well defined, then 'NaN' is returned.\n\n        >>> s = pd.Series([1, 0, 0, 0])\n        >>> s.autocorr()\n        nan\n        \"\"\"\n        return self.corr(self.shift(lag))\n\n    def dot(self, other):\n        \"\"\"\n        Compute the dot product between the Series and the columns of other.\n\n        This method computes the dot product between the Series and another\n        one, or the Series and each column of a DataFrame, or the Series and\n        each column of an array.\n\n        It can also be called using `self @ other` in Python >= 3.5.\n\n        Parameters\n        ----------\n        other : Series, DataFrame or array-like\n            The other object to compute the dot product with its columns.\n\n        Returns\n        -------\n        scalar, Series or numpy.ndarray\n            The dot product of the Series and other if other is a Series,\n            a Series of the dot products between the Series and each column\n            of other if other is a DataFrame, or a numpy.ndarray of the dot\n            products between the Series and each column of the numpy array.\n\n        See Also\n        --------\n        DataFrame.dot: Compute the matrix product with the DataFrame.\n        Series.mul: Multiplication of series and other, element-wise.\n\n        Notes\n        -----\n        The Series and other have to share the same index if other is a Series\n        or a DataFrame.\n\n        Examples\n        --------\n        >>> s = pd.Series([0, 1, 2, 3])\n        >>> other = pd.Series([-1, 2, -3, 4])\n        >>> s.dot(other)\n        8\n        >>> s @ other\n        8\n        >>> df = pd.DataFrame([[0, 1], [-2, 3], [4, -5], [6, 7]])\n        >>> s.dot(df)\n        0    24\n        1    14\n        dtype: int64\n        >>> arr = np.array([[0, 1], [-2, 3], [4, -5], [6, 7]])\n        >>> s.dot(arr)\n        array([24, 14])\n        \"\"\"\n        if isinstance(other, (Series, ABCDataFrame)):\n            common = self.index.union(other.index)\n            if len(common) > len(self.index) or len(common) > len(other.index):\n                raise ValueError(\"matrices are not aligned\")\n\n            left = self.reindex(index=common, copy=False)\n            right = other.reindex(index=common, copy=False)\n            lvals = left.values\n            rvals = right.values\n        else:\n            lvals = self.values\n            rvals = np.asarray(other)\n            if lvals.shape[0] != rvals.shape[0]:\n                raise Exception(\n                    f\"Dot product shape mismatch, {lvals.shape} vs {rvals.shape}\"\n                )\n\n        if isinstance(other, ABCDataFrame):\n            return self._constructor(\n                np.dot(lvals, rvals), index=other.columns\n            ).__finalize__(self)\n        elif isinstance(other, Series):\n            return np.dot(lvals, rvals)\n        elif isinstance(rvals, np.ndarray):\n            return np.dot(lvals, rvals)\n        else:  # pragma: no cover\n            raise TypeError(f\"unsupported type: {type(other)}\")\n\n    def __matmul__(self, other):\n        \"\"\"\n        Matrix multiplication using binary `@` operator in Python>=3.5.\n        \"\"\"\n        return self.dot(other)\n\n    def __rmatmul__(self, other):\n        \"\"\"\n        Matrix multiplication using binary `@` operator in Python>=3.5.\n        \"\"\"\n        return self.dot(np.transpose(other))\n\n    @doc(base.IndexOpsMixin.searchsorted, klass=\"Series\")\n    def searchsorted(self, value, side=\"left\", sorter=None):\n        return algorithms.searchsorted(self._values, value, side=side, sorter=sorter)\n\n    # -------------------------------------------------------------------\n    # Combination\n\n    def append(self, to_append, ignore_index=False, verify_integrity=False):\n        \"\"\"\n        Concatenate two or more Series.\n\n        Parameters\n        ----------\n        to_append : Series or list/tuple of Series\n            Series to append with self.\n        ignore_index : bool, default False\n            If True, do not use the index labels.\n        verify_integrity : bool, default False\n            If 
True, raise Exception on creating index with duplicates.\n\n Returns\n -------\n Series\n Concatenated Series.\n\n See Also\n --------\n concat : General function to concatenate DataFrame or Series objects.\n\n Notes\n -----\n Iteratively appending to a Series can be more computationally intensive\n than a single concatenate. A better solution is to append values to a\n list and then concatenate the list with the original Series all at\n once.\n\n Examples\n --------\n >>> s1 = pd.Series([1, 2, 3])\n >>> s2 = pd.Series([4, 5, 6])\n >>> s3 = pd.Series([4, 5, 6], index=[3, 4, 5])\n >>> s1.append(s2)\n 0 1\n 1 2\n 2 3\n 0 4\n 1 5\n 2 6\n dtype: int64\n\n >>> s1.append(s3)\n 0 1\n 1 2\n 2 3\n 3 4\n 4 5\n 5 6\n dtype: int64\n\n With `ignore_index` set to True:\n\n >>> s1.append(s2, ignore_index=True)\n 0 1\n 1 2\n 2 3\n 3 4\n 4 5\n 5 6\n dtype: int64\n\n With `verify_integrity` set to True:\n\n >>> s1.append(s2, verify_integrity=True)\n Traceback (most recent call last):\n ...\n ValueError: Indexes have overlapping values: [0, 1, 2]\n \"\"\"\n from pandas.core.reshape.concat import concat\n\n if isinstance(to_append, (list, tuple)):\n to_concat = [self]\n to_concat.extend(to_append)\n else:\n to_concat = [self, to_append]\n if any(isinstance(x, (ABCDataFrame,)) for x in to_concat[1:]):\n msg = (\n f\"to_append should be a Series or list/tuple of Series, \"\n f\"got DataFrame\"\n )\n raise TypeError(msg)\n return concat(\n to_concat, ignore_index=ignore_index, verify_integrity=verify_integrity\n )\n\n def _binop(self, other, func, level=None, fill_value=None):\n \"\"\"\n Perform generic binary operation with optional fill value.\n\n Parameters\n ----------\n other : Series\n func : binary operator\n fill_value : float or object\n Value to substitute for NA/null values. If both Series are NA in a\n location, the result will be NA regardless of the passed fill value.\n level : int or level name, default None\n Broadcast across a level, matching Index values on the\n passed MultiIndex level.\n\n Returns\n -------\n Series\n \"\"\"\n if not isinstance(other, Series):\n raise AssertionError(\"Other operand must be Series\")\n\n new_index = self.index\n this = self\n\n if not self.index.equals(other.index):\n this, other = self.align(other, level=level, join=\"outer\", copy=False)\n new_index = this.index\n\n this_vals, other_vals = ops.fill_binop(this.values, other.values, fill_value)\n\n with np.errstate(all=\"ignore\"):\n result = func(this_vals, other_vals)\n\n name = ops.get_op_result_name(self, other)\n ret = ops._construct_result(self, result, new_index, name)\n return ret\n\n def combine(self, other, func, fill_value=None) -> \"Series\":\n \"\"\"\n Combine the Series with a Series or scalar according to `func`.\n\n Combine the Series and `other` using `func` to perform elementwise\n selection for combined Series.\n `fill_value` is assumed when value is missing at some index\n from one of the two objects being combined.\n\n Parameters\n ----------\n other : Series or scalar\n The value(s) to be combined with the `Series`.\n func : function\n Function that takes two scalars as inputs and returns an element.\n fill_value : scalar, optional\n The value to assume when an index is missing from\n one Series or the other. 
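# --- Sketch of the Notes on append above (illustrative only): collecting the
# pieces in a list and concatenating once avoids the repeated copying that
# chained Series.append calls would incur.
import pandas as pd

pieces = [pd.Series([i, i + 1]) for i in range(3)]
combined = pd.concat(pieces, ignore_index=True)  # one concatenate, not three appends
assert list(combined) == [0, 1, 1, 2, 2, 3]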
The default specifies to use the\n appropriate NaN value for the underlying dtype of the Series.\n\n Returns\n -------\n Series\n The result of combining the Series with the other object.\n\n See Also\n --------\n Series.combine_first : Combine Series values, choosing the calling\n Series' values first.\n\n Examples\n --------\n Consider 2 Datasets ``s1`` and ``s2`` containing\n highest clocked speeds of different birds.\n\n >>> s1 = pd.Series({'falcon': 330.0, 'eagle': 160.0})\n >>> s1\n falcon 330.0\n eagle 160.0\n dtype: float64\n >>> s2 = pd.Series({'falcon': 345.0, 'eagle': 200.0, 'duck': 30.0})\n >>> s2\n falcon 345.0\n eagle 200.0\n duck 30.0\n dtype: float64\n\n Now, to combine the two datasets and view the highest speeds\n of the birds across the two datasets\n\n >>> s1.combine(s2, max)\n duck NaN\n eagle 200.0\n falcon 345.0\n dtype: float64\n\n In the previous example, the resulting value for duck is missing,\n because the maximum of a NaN and a float is a NaN.\n So, in the example, we set ``fill_value=0``,\n so the maximum value returned will be the value from some dataset.\n\n >>> s1.combine(s2, max, fill_value=0)\n duck 30.0\n eagle 200.0\n falcon 345.0\n dtype: float64\n \"\"\"\n if fill_value is None:\n fill_value = na_value_for_dtype(self.dtype, compat=False)\n\n if isinstance(other, Series):\n # If other is a Series, result is based on union of Series,\n # so do this element by element\n new_index = self.index.union(other.index)\n new_name = ops.get_op_result_name(self, other)\n new_values = []\n for idx in new_index:\n lv = self.get(idx, fill_value)\n rv = other.get(idx, fill_value)\n with np.errstate(all=\"ignore\"):\n new_values.append(func(lv, rv))\n else:\n # Assume that other is a scalar, so apply the function for\n # each element in the Series\n new_index = self.index\n with np.errstate(all=\"ignore\"):\n new_values = [func(lv, other) for lv in self._values]\n new_name = self.name\n\n if is_categorical_dtype(self.dtype):\n pass\n elif is_extension_array_dtype(self.dtype):\n # TODO: can we do this for only SparseDtype?\n # The function can return something of any type, so check\n # if the type is compatible with the calling EA.\n new_values = maybe_cast_to_extension_array(type(self._values), new_values)\n return self._constructor(new_values, index=new_index, name=new_name)\n\n def combine_first(self, other) -> \"Series\":\n \"\"\"\n Combine Series values, choosing the calling Series's values first.\n\n Parameters\n ----------\n other : Series\n The value(s) to be combined with the `Series`.\n\n Returns\n -------\n Series\n The result of combining the Series with the other object.\n\n See Also\n --------\n Series.combine : Perform elementwise operation on two Series\n using a given function.\n\n Notes\n -----\n Result index will be the union of the two indexes.\n\n Examples\n --------\n >>> s1 = pd.Series([1, np.nan])\n >>> s2 = pd.Series([3, 4])\n >>> s1.combine_first(s2)\n 0 1.0\n 1 4.0\n dtype: float64\n \"\"\"\n new_index = self.index.union(other.index)\n this = self.reindex(new_index, copy=False)\n other = other.reindex(new_index, copy=False)\n if this.dtype.kind == \"M\" and other.dtype.kind != \"M\":\n other = to_datetime(other)\n\n return this.where(notna(this), other)\n\n def update(self, other) -> None:\n \"\"\"\n Modify Series in place using non-NA values from passed\n Series. 
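# --- Illustrative sketch: combine applies `func` pairwise over the union of
# the two indexes, with `fill_value` standing in for labels missing on one
# side; combine_first simply prefers the calling Series' values.
import pandas as pd

s1 = pd.Series({"falcon": 330.0, "eagle": 160.0})
s2 = pd.Series({"falcon": 345.0, "eagle": 200.0, "duck": 30.0})
fastest = s1.combine(s2, max, fill_value=0)
assert fastest["duck"] == 30.0               # max(fill_value=0, 30.0)
assert s1.combine_first(s2)["duck"] == 30.0  # taken from s2 where s1 is absent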
Aligns on index.\n\n Parameters\n ----------\n other : Series\n\n Examples\n --------\n >>> s = pd.Series([1, 2, 3])\n >>> s.update(pd.Series([4, 5, 6]))\n >>> s\n 0 4\n 1 5\n 2 6\n dtype: int64\n\n >>> s = pd.Series(['a', 'b', 'c'])\n >>> s.update(pd.Series(['d', 'e'], index=[0, 2]))\n >>> s\n 0 d\n 1 b\n 2 e\n dtype: object\n\n >>> s = pd.Series([1, 2, 3])\n >>> s.update(pd.Series([4, 5, 6, 7, 8]))\n >>> s\n 0 4\n 1 5\n 2 6\n dtype: int64\n\n If ``other`` contains NaNs the corresponding values are not updated\n in the original Series.\n\n >>> s = pd.Series([1, 2, 3])\n >>> s.update(pd.Series([4, np.nan, 6]))\n >>> s\n 0 4\n 1 2\n 2 6\n dtype: int64\n \"\"\"\n other = other.reindex_like(self)\n mask = notna(other)\n\n self._data = self._data.putmask(mask=mask, new=other)\n self._maybe_update_cacher()\n\n # ----------------------------------------------------------------------\n # Reindexing, sorting\n\n def sort_values(\n self,\n axis=0,\n ascending=True,\n inplace: bool = False,\n kind: str = \"quicksort\",\n na_position: str = \"last\",\n ignore_index: bool = False,\n ):\n \"\"\"\n Sort by the values.\n\n Sort a Series in ascending or descending order by some\n criterion.\n\n Parameters\n ----------\n axis : {0 or 'index'}, default 0\n Axis to direct sorting. The value 'index' is accepted for\n compatibility with DataFrame.sort_values.\n ascending : bool, default True\n If True, sort values in ascending order, otherwise descending.\n inplace : bool, default False\n If True, perform operation in-place.\n kind : {'quicksort', 'mergesort' or 'heapsort'}, default 'quicksort'\n Choice of sorting algorithm. See also :func:`numpy.sort` for more\n information. 'mergesort' is the only stable algorithm.\n na_position : {'first' or 'last'}, default 'last'\n Argument 'first' puts NaNs at the beginning, 'last' puts NaNs at\n the end.\n ignore_index : bool, default False\n If True, the resulting axis will be labeled 0, 1, …, n - 1.\n\n .. 
versionadded:: 1.0.0\n\n Returns\n -------\n Series\n Series ordered by values.\n\n See Also\n --------\n Series.sort_index : Sort by the Series indices.\n DataFrame.sort_values : Sort DataFrame by the values along either axis.\n DataFrame.sort_index : Sort DataFrame by indices.\n\n Examples\n --------\n >>> s = pd.Series([np.nan, 1, 3, 10, 5])\n >>> s\n 0 NaN\n 1 1.0\n 2 3.0\n 3 10.0\n 4 5.0\n dtype: float64\n\n Sort values ascending order (default behaviour)\n\n >>> s.sort_values(ascending=True)\n 1 1.0\n 2 3.0\n 4 5.0\n 3 10.0\n 0 NaN\n dtype: float64\n\n Sort values descending order\n\n >>> s.sort_values(ascending=False)\n 3 10.0\n 4 5.0\n 2 3.0\n 1 1.0\n 0 NaN\n dtype: float64\n\n Sort values inplace\n\n >>> s.sort_values(ascending=False, inplace=True)\n >>> s\n 3 10.0\n 4 5.0\n 2 3.0\n 1 1.0\n 0 NaN\n dtype: float64\n\n Sort values putting NAs first\n\n >>> s.sort_values(na_position='first')\n 0 NaN\n 1 1.0\n 2 3.0\n 4 5.0\n 3 10.0\n dtype: float64\n\n Sort a series of strings\n\n >>> s = pd.Series(['z', 'b', 'd', 'a', 'c'])\n >>> s\n 0 z\n 1 b\n 2 d\n 3 a\n 4 c\n dtype: object\n\n >>> s.sort_values()\n 3 a\n 1 b\n 4 c\n 2 d\n 0 z\n dtype: object\n \"\"\"\n inplace = validate_bool_kwarg(inplace, \"inplace\")\n # Validate the axis parameter\n self._get_axis_number(axis)\n\n # GH 5856/5853\n if inplace and self._is_cached:\n raise ValueError(\n \"This Series is a view of some other array, to \"\n \"sort in-place you must create a copy\"\n )\n\n def _try_kind_sort(arr):\n # easier to ask forgiveness than permission\n try:\n # if kind==mergesort, it can fail for object dtype\n return arr.argsort(kind=kind)\n except TypeError:\n # stable sort not available for object dtype\n # uses the argsort default quicksort\n return arr.argsort(kind=\"quicksort\")\n\n arr = self._values\n sorted_index = np.empty(len(self), dtype=np.int32)\n\n bad = isna(arr)\n\n good = ~bad\n idx = ibase.default_index(len(self))\n\n argsorted = _try_kind_sort(arr[good])\n\n if is_list_like(ascending):\n if len(ascending) != 1:\n raise ValueError(\n f\"Length of ascending ({len(ascending)}) must be 1 for Series\"\n )\n ascending = ascending[0]\n\n if not is_bool(ascending):\n raise ValueError(\"ascending must be boolean\")\n\n if not ascending:\n argsorted = argsorted[::-1]\n\n if na_position == \"last\":\n n = good.sum()\n sorted_index[:n] = idx[good][argsorted]\n sorted_index[n:] = idx[bad]\n elif na_position == \"first\":\n n = bad.sum()\n sorted_index[n:] = idx[good][argsorted]\n sorted_index[:n] = idx[bad]\n else:\n raise ValueError(f\"invalid na_position: {na_position}\")\n\n result = self._constructor(arr[sorted_index], index=self.index[sorted_index])\n\n if ignore_index:\n result.index = ibase.default_index(len(sorted_index))\n\n if inplace:\n self._update_inplace(result)\n else:\n return result.__finalize__(self)\n\n def sort_index(\n self,\n axis=0,\n level=None,\n ascending: bool = True,\n inplace: bool = False,\n kind: str = \"quicksort\",\n na_position: str = \"last\",\n sort_remaining: bool = True,\n ignore_index: bool = False,\n ):\n \"\"\"\n Sort Series by index labels.\n\n Returns a new Series sorted by label if `inplace` argument is\n ``False``, otherwise updates the original series and returns None.\n\n Parameters\n ----------\n axis : int, default 0\n Axis to direct sorting. This can only be 0 for Series.\n level : int, optional\n If not None, sort on values in specified index level(s).\n ascending : bool or list of bools, default True\n Sort ascending vs. descending. 
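# --- Usage sketch for sort_values above (illustrative only): `na_position`
# decides where the NaN block lands, while the non-NA values are argsorted
# with numpy as implemented above.
import numpy as np
import pandas as pd

s = pd.Series([np.nan, 1, 3, 10, 5])
assert np.isnan(s.sort_values(na_position="first").iloc[0])  # NaNs at the front
assert list(s.sort_values(ascending=False).dropna()) == [10.0, 5.0, 3.0, 1.0]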
When the index is a MultiIndex the\n sort direction can be controlled for each level individually.\n inplace : bool, default False\n If True, perform operation in-place.\n kind : {'quicksort', 'mergesort', 'heapsort'}, default 'quicksort'\n Choice of sorting algorithm. See also :func:`numpy.sort` for more\n information. 'mergesort' is the only stable algorithm. For\n DataFrames, this option is only applied when sorting on a single\n column or label.\n na_position : {'first', 'last'}, default 'last'\n If 'first' puts NaNs at the beginning, 'last' puts NaNs at the end.\n Not implemented for MultiIndex.\n sort_remaining : bool, default True\n If True and sorting by level and index is multilevel, sort by other\n levels too (in order) after sorting by specified level.\n ignore_index : bool, default False\n If True, the resulting axis will be labeled 0, 1, …, n - 1.\n\n .. versionadded:: 1.0.0\n\n Returns\n -------\n Series\n The original Series sorted by the labels.\n\n See Also\n --------\n DataFrame.sort_index: Sort DataFrame by the index.\n DataFrame.sort_values: Sort DataFrame by the value.\n Series.sort_values : Sort Series by the value.\n\n Examples\n --------\n >>> s = pd.Series(['a', 'b', 'c', 'd'], index=[3, 2, 1, 4])\n >>> s.sort_index()\n 1 c\n 2 b\n 3 a\n 4 d\n dtype: object\n\n Sort Descending\n\n >>> s.sort_index(ascending=False)\n 4 d\n 3 a\n 2 b\n 1 c\n dtype: object\n\n Sort Inplace\n\n >>> s.sort_index(inplace=True)\n >>> s\n 1 c\n 2 b\n 3 a\n 4 d\n dtype: object\n\n By default NaNs are put at the end, but use `na_position` to place\n them at the beginning\n\n >>> s = pd.Series(['a', 'b', 'c', 'd'], index=[3, 2, 1, np.nan])\n >>> s.sort_index(na_position='first')\n NaN d\n 1.0 c\n 2.0 b\n 3.0 a\n dtype: object\n\n Specify index level to sort\n\n >>> arrays = [np.array(['qux', 'qux', 'foo', 'foo',\n ... 'baz', 'baz', 'bar', 'bar']),\n ... np.array(['two', 'one', 'two', 'one',\n ... 
'two', 'one', 'two', 'one'])]\n >>> s = pd.Series([1, 2, 3, 4, 5, 6, 7, 8], index=arrays)\n >>> s.sort_index(level=1)\n bar one 8\n baz one 6\n foo one 4\n qux one 2\n bar two 7\n baz two 5\n foo two 3\n qux two 1\n dtype: int64\n\n Does not sort by remaining levels when sorting by levels\n\n >>> s.sort_index(level=1, sort_remaining=False)\n qux one 2\n foo one 4\n baz one 6\n bar one 8\n qux two 1\n foo two 3\n baz two 5\n bar two 7\n dtype: int64\n \"\"\"\n # TODO: this can be combined with DataFrame.sort_index impl as\n # almost identical\n inplace = validate_bool_kwarg(inplace, \"inplace\")\n # Validate the axis parameter\n self._get_axis_number(axis)\n index = self.index\n\n if level is not None:\n new_index, indexer = index.sortlevel(\n level, ascending=ascending, sort_remaining=sort_remaining\n )\n elif isinstance(index, MultiIndex):\n from pandas.core.sorting import lexsort_indexer\n\n labels = index._sort_levels_monotonic()\n indexer = lexsort_indexer(\n labels._get_codes_for_sorting(),\n orders=ascending,\n na_position=na_position,\n )\n else:\n from pandas.core.sorting import nargsort\n\n # Check monotonic-ness before sort an index\n # GH11080\n if (ascending and index.is_monotonic_increasing) or (\n not ascending and index.is_monotonic_decreasing\n ):\n if inplace:\n return\n else:\n return self.copy()\n\n indexer = nargsort(\n index, kind=kind, ascending=ascending, na_position=na_position\n )\n\n indexer = ensure_platform_int(indexer)\n new_index = index.take(indexer)\n new_index = new_index._sort_levels_monotonic()\n\n new_values = self._values.take(indexer)\n result = self._constructor(new_values, index=new_index)\n\n if ignore_index:\n result.index = ibase.default_index(len(result))\n\n if inplace:\n self._update_inplace(result)\n else:\n return result.__finalize__(self)\n\n def argsort(self, axis=0, kind=\"quicksort\", order=None) -> \"Series\":\n \"\"\"\n Override ndarray.argsort. Argsorts the value, omitting NA/null values,\n and places the result in the same locations as the non-NA values.\n\n Parameters\n ----------\n axis : {0 or \"index\"}\n Has no effect but is accepted for compatibility with numpy.\n kind : {'mergesort', 'quicksort', 'heapsort'}, default 'quicksort'\n Choice of sorting algorithm. See np.sort for more\n information. 
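# --- Sketch of the monotonicity shortcut above (illustrative only, GH11080):
# an index already sorted in the requested direction is returned as a plain
# copy without re-sorting.
import pandas as pd

s = pd.Series([1, 2, 3], index=["a", "b", "c"])
out = s.sort_index()  # index.is_monotonic_increasing, so only a copy is made
assert out.equals(s) and out is not s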
'mergesort' is the only stable algorithm.\n order : None\n Has no effect but is accepted for compatibility with numpy.\n\n Returns\n -------\n Series\n Positions of values within the sort order with -1 indicating\n nan values.\n\n See Also\n --------\n numpy.ndarray.argsort : Returns the indices that would sort this array.\n \"\"\"\n values = self._values\n mask = isna(values)\n\n if mask.any():\n result = Series(-1, index=self.index, name=self.name, dtype=\"int64\")\n notmask = ~mask\n result[notmask] = np.argsort(values[notmask], kind=kind)\n return self._constructor(result, index=self.index).__finalize__(self)\n else:\n return self._constructor(\n np.argsort(values, kind=kind), index=self.index, dtype=\"int64\"\n ).__finalize__(self)\n\n def nlargest(self, n=5, keep=\"first\") -> \"Series\":\n \"\"\"\n Return the largest `n` elements.\n\n Parameters\n ----------\n n : int, default 5\n Return this many descending sorted values.\n keep : {'first', 'last', 'all'}, default 'first'\n When there are duplicate values that cannot all fit in a\n Series of `n` elements:\n\n - ``first`` : return the first `n` occurrences in order\n of appearance.\n - ``last`` : return the last `n` occurrences in reverse\n order of appearance.\n - ``all`` : keep all occurrences. This can result in a Series of\n size larger than `n`.\n\n Returns\n -------\n Series\n The `n` largest values in the Series, sorted in decreasing order.\n\n See Also\n --------\n Series.nsmallest: Get the `n` smallest elements.\n Series.sort_values: Sort Series by values.\n Series.head: Return the first `n` rows.\n\n Notes\n -----\n Faster than ``.sort_values(ascending=False).head(n)`` for small `n`\n relative to the size of the ``Series`` object.\n\n Examples\n --------\n >>> countries_population = {\"Italy\": 59000000, \"France\": 65000000,\n ... \"Malta\": 434000, \"Maldives\": 434000,\n ... \"Brunei\": 434000, \"Iceland\": 337000,\n ... \"Nauru\": 11300, \"Tuvalu\": 11300,\n ... \"Anguilla\": 11300, \"Monserat\": 5200}\n >>> s = pd.Series(countries_population)\n >>> s\n Italy 59000000\n France 65000000\n Malta 434000\n Maldives 434000\n Brunei 434000\n Iceland 337000\n Nauru 11300\n Tuvalu 11300\n Anguilla 11300\n Monserat 5200\n dtype: int64\n\n The `n` largest elements where ``n=5`` by default.\n\n >>> s.nlargest()\n France 65000000\n Italy 59000000\n Malta 434000\n Maldives 434000\n Brunei 434000\n dtype: int64\n\n The `n` largest elements where ``n=3``. Default `keep` value is 'first'\n so Malta will be kept.\n\n >>> s.nlargest(3)\n France 65000000\n Italy 59000000\n Malta 434000\n dtype: int64\n\n The `n` largest elements where ``n=3`` and keeping the last duplicates.\n Brunei will be kept since it is the last with value 434000 based on\n the index order.\n\n >>> s.nlargest(3, keep='last')\n France 65000000\n Italy 59000000\n Brunei 434000\n dtype: int64\n\n The `n` largest elements where ``n=3`` with all duplicates kept. 
Note\n that the returned Series has five elements due to the three duplicates.\n\n >>> s.nlargest(3, keep='all')\n France 65000000\n Italy 59000000\n Malta 434000\n Maldives 434000\n Brunei 434000\n dtype: int64\n \"\"\"\n return algorithms.SelectNSeries(self, n=n, keep=keep).nlargest()\n\n def nsmallest(self, n=5, keep=\"first\") -> \"Series\":\n \"\"\"\n Return the smallest `n` elements.\n\n Parameters\n ----------\n n : int, default 5\n Return this many ascending sorted values.\n keep : {'first', 'last', 'all'}, default 'first'\n When there are duplicate values that cannot all fit in a\n Series of `n` elements:\n\n - ``first`` : return the first `n` occurrences in order\n of appearance.\n - ``last`` : return the last `n` occurrences in reverse\n order of appearance.\n - ``all`` : keep all occurrences. This can result in a Series of\n size larger than `n`.\n\n Returns\n -------\n Series\n The `n` smallest values in the Series, sorted in increasing order.\n\n See Also\n --------\n Series.nlargest: Get the `n` largest elements.\n Series.sort_values: Sort Series by values.\n Series.head: Return the first `n` rows.\n\n Notes\n -----\n Faster than ``.sort_values().head(n)`` for small `n` relative to\n the size of the ``Series`` object.\n\n Examples\n --------\n >>> countries_population = {\"Italy\": 59000000, \"France\": 65000000,\n ... \"Brunei\": 434000, \"Malta\": 434000,\n ... \"Maldives\": 434000, \"Iceland\": 337000,\n ... \"Nauru\": 11300, \"Tuvalu\": 11300,\n ... \"Anguilla\": 11300, \"Monserat\": 5200}\n >>> s = pd.Series(countries_population)\n >>> s\n Italy 59000000\n France 65000000\n Brunei 434000\n Malta 434000\n Maldives 434000\n Iceland 337000\n Nauru 11300\n Tuvalu 11300\n Anguilla 11300\n Monserat 5200\n dtype: int64\n\n The `n` smallest elements where ``n=5`` by default.\n\n >>> s.nsmallest()\n Monserat 5200\n Nauru 11300\n Tuvalu 11300\n Anguilla 11300\n Iceland 337000\n dtype: int64\n\n The `n` smallest elements where ``n=3``. Default `keep` value is\n 'first' so Nauru and Tuvalu will be kept.\n\n >>> s.nsmallest(3)\n Monserat 5200\n Nauru 11300\n Tuvalu 11300\n dtype: int64\n\n The `n` smallest elements where ``n=3`` and keeping the last\n duplicates. Anguilla and Tuvalu will be kept since they are the last\n with value 11300 based on the index order.\n\n >>> s.nsmallest(3, keep='last')\n Monserat 5200\n Anguilla 11300\n Tuvalu 11300\n dtype: int64\n\n The `n` smallest elements where ``n=3`` with all duplicates kept. Note\n that the returned Series has four elements due to the three duplicates.\n\n >>> s.nsmallest(3, keep='all')\n Monserat 5200\n Nauru 11300\n Tuvalu 11300\n Anguilla 11300\n dtype: int64\n \"\"\"\n return algorithms.SelectNSeries(self, n=n, keep=keep).nsmallest()\n\n def swaplevel(self, i=-2, j=-1, copy=True) -> \"Series\":\n \"\"\"\n Swap levels i and j in a :class:`MultiIndex`.\n\n Default is to swap the two innermost levels of the index.\n\n Parameters\n ----------\n i, j : int, str\n Level of the indices to be swapped. 
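# --- Illustrative sketch: swaplevel only reorders the MultiIndex levels; the
# data and each label/value pairing are untouched. The sample index is an
# assumption for demonstration.
import pandas as pd

idx = pd.MultiIndex.from_product(
    [["one", "two"], ["a", "b"]], names=["outer", "inner"]
)
s = pd.Series([1, 2, 3, 4], index=idx)
swapped = s.swaplevel()  # defaults i=-2, j=-1: the two innermost levels
assert list(swapped.index.names) == ["inner", "outer"]
assert swapped[("a", "one")] == s[("one", "a")]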
Can pass level name as string.\n copy : bool, default True\n Whether to copy underlying data.\n\n Returns\n -------\n Series\n Series with levels swapped in MultiIndex.\n \"\"\"\n assert isinstance(self.index, ABCMultiIndex)\n new_index = self.index.swaplevel(i, j)\n return self._constructor(self._values, index=new_index, copy=copy).__finalize__(\n self\n )\n\n def reorder_levels(self, order) -> \"Series\":\n \"\"\"\n Rearrange index levels using input order.\n\n May not drop or duplicate levels.\n\n Parameters\n ----------\n order : list of int representing new level order\n Reference level by number or key.\n\n Returns\n -------\n type of caller (new object)\n \"\"\"\n if not isinstance(self.index, MultiIndex): # pragma: no cover\n raise Exception(\"Can only reorder levels on a hierarchical axis.\")\n\n result = self.copy()\n assert isinstance(result.index, ABCMultiIndex)\n result.index = result.index.reorder_levels(order)\n return result\n\n def explode(self) -> \"Series\":\n \"\"\"\n Transform each element of a list-like to a row, replicating the\n index values.\n\n .. versionadded:: 0.25.0\n\n Returns\n -------\n Series\n Exploded lists to rows; index will be duplicated for these rows.\n\n See Also\n --------\n Series.str.split : Split string values on specified separator.\n Series.unstack : Unstack, a.k.a. pivot, Series with MultiIndex\n to produce DataFrame.\n DataFrame.melt : Unpivot a DataFrame from wide format to long format.\n DataFrame.explode : Explode a DataFrame from list-like\n columns to long format.\n\n Notes\n -----\n This routine will explode list-likes including lists, tuples,\n Series, and np.ndarray. The result dtype of the subset rows will\n be object. Scalars will be returned unchanged. Empty list-likes will\n result in a np.nan for that row.\n\n Examples\n --------\n >>> s = pd.Series([[1, 2, 3], 'foo', [], [3, 4]])\n >>> s\n 0 [1, 2, 3]\n 1 foo\n 2 []\n 3 [3, 4]\n dtype: object\n\n >>> s.explode()\n 0 1\n 0 2\n 0 3\n 1 foo\n 2 NaN\n 3 3\n 3 4\n dtype: object\n \"\"\"\n if not len(self) or not is_object_dtype(self):\n return self.copy()\n\n values, counts = reshape.explode(np.asarray(self.array))\n\n result = Series(values, index=self.index.repeat(counts), name=self.name)\n return result\n\n def unstack(self, level=-1, fill_value=None):\n \"\"\"\n Unstack, also known as pivot, Series with MultiIndex to produce DataFrame.\n The level involved will automatically get sorted.\n\n Parameters\n ----------\n level : int, str, or list of these, default last level\n Level(s) to unstack, can pass level name.\n fill_value : scalar value, default None\n Value to use when replacing NaN values.\n\n Returns\n -------\n DataFrame\n Unstacked Series.\n\n Examples\n --------\n >>> s = pd.Series([1, 2, 3, 4],\n ... index=pd.MultiIndex.from_product([['one', 'two'],\n ... 
['a', 'b']]))\n >>> s\n one a 1\n b 2\n two a 3\n b 4\n dtype: int64\n\n >>> s.unstack(level=-1)\n a b\n one 1 2\n two 3 4\n\n >>> s.unstack(level=0)\n one two\n a 1 3\n b 2 4\n \"\"\"\n from pandas.core.reshape.reshape import unstack\n\n return unstack(self, level, fill_value)\n\n # ----------------------------------------------------------------------\n # function application\n\n def map(self, arg, na_action=None) -> \"Series\":\n \"\"\"\n Map values of Series according to input correspondence.\n\n Used for substituting each value in a Series with another value,\n that may be derived from a function, a ``dict`` or\n a :class:`Series`.\n\n Parameters\n ----------\n arg : function, collections.abc.Mapping subclass or Series\n Mapping correspondence.\n na_action : {None, 'ignore'}, default None\n If 'ignore', propagate NaN values, without passing them to the\n mapping correspondence.\n\n Returns\n -------\n Series\n Same index as caller.\n\n See Also\n --------\n Series.apply : For applying more complex functions on a Series.\n DataFrame.apply : Apply a function row-/column-wise.\n DataFrame.applymap : Apply a function elementwise on a whole DataFrame.\n\n Notes\n -----\n When ``arg`` is a dictionary, values in Series that are not in the\n dictionary (as keys) are converted to ``NaN``. However, if the\n dictionary is a ``dict`` subclass that defines ``__missing__`` (i.e.\n provides a method for default values), then this default is used\n rather than ``NaN``.\n\n Examples\n --------\n >>> s = pd.Series(['cat', 'dog', np.nan, 'rabbit'])\n >>> s\n 0 cat\n 1 dog\n 2 NaN\n 3 rabbit\n dtype: object\n\n ``map`` accepts a ``dict`` or a ``Series``. Values that are not found\n in the ``dict`` are converted to ``NaN``, unless the dict has a default\n value (e.g. ``defaultdict``):\n\n >>> s.map({'cat': 'kitten', 'dog': 'puppy'})\n 0 kitten\n 1 puppy\n 2 NaN\n 3 NaN\n dtype: object\n\n It also accepts a function:\n\n >>> s.map('I am a {}'.format)\n 0 I am a cat\n 1 I am a dog\n 2 I am a nan\n 3 I am a rabbit\n dtype: object\n\n To avoid applying the function to missing values (and keep them as\n ``NaN``) ``na_action='ignore'`` can be used:\n\n >>> s.map('I am a {}'.format, na_action='ignore')\n 0 I am a cat\n 1 I am a dog\n 2 NaN\n 3 I am a rabbit\n dtype: object\n \"\"\"\n new_values = super()._map_values(arg, na_action=na_action)\n return self._constructor(new_values, index=self.index).__finalize__(self)\n\n def _gotitem(self, key, ndim, subset=None) -> \"Series\":\n \"\"\"\n Sub-classes to define. Return a sliced object.\n\n Parameters\n ----------\n key : string / list of selections\n ndim : 1,2\n Requested ndim of result.\n subset : object, default None\n Subset to act on.\n \"\"\"\n return self\n\n _agg_see_also_doc = dedent(\n \"\"\"\n See Also\n --------\n Series.apply : Invoke function on a Series.\n Series.transform : Transform function producing a Series with like indexes.\n \"\"\"\n )\n\n _agg_examples_doc = dedent(\n \"\"\"\n Examples\n --------\n >>> s = pd.Series([1, 2, 3, 4])\n >>> s\n 0 1\n 1 2\n 2 3\n 3 4\n dtype: int64\n\n >>> s.agg('min')\n 1\n\n >>> s.agg(['min', 'max'])\n min 1\n max 4\n dtype: int64\n \"\"\"\n )\n\n @Substitution(\n see_also=_agg_see_also_doc,\n examples=_agg_examples_doc,\n versionadded=\"\\n.. 
versionadded:: 0.20.0\\n\",\n **_shared_doc_kwargs,\n )\n @Appender(generic._shared_docs[\"aggregate\"])\n def aggregate(self, func, axis=0, *args, **kwargs):\n # Validate the axis parameter\n self._get_axis_number(axis)\n result, how = self._aggregate(func, *args, **kwargs)\n if result is None:\n\n # we can be called from an inner function which\n # passes this meta-data\n kwargs.pop(\"_axis\", None)\n kwargs.pop(\"_level\", None)\n\n # try a regular apply, this evaluates lambdas\n # row-by-row; however if the lambda is expected a Series\n # expression, e.g.: lambda x: x-x.quantile(0.25)\n # this will fail, so we can try a vectorized evaluation\n\n # we cannot FIRST try the vectorized evaluation, because\n # then .agg and .apply would have different semantics if the\n # operation is actually defined on the Series, e.g. str\n try:\n result = self.apply(func, *args, **kwargs)\n except (ValueError, AttributeError, TypeError):\n result = func(self, *args, **kwargs)\n\n return result\n\n agg = aggregate\n\n @Appender(generic._shared_docs[\"transform\"] % _shared_doc_kwargs)\n def transform(self, func, axis=0, *args, **kwargs):\n # Validate the axis parameter\n self._get_axis_number(axis)\n return super().transform(func, *args, **kwargs)\n\n def apply(self, func, convert_dtype=True, args=(), **kwds):\n \"\"\"\n Invoke function on values of Series.\n\n Can be ufunc (a NumPy function that applies to the entire Series)\n or a Python function that only works on single values.\n\n Parameters\n ----------\n func : function\n Python function or NumPy ufunc to apply.\n convert_dtype : bool, default True\n Try to find better dtype for elementwise function results. If\n False, leave as dtype=object.\n args : tuple\n Positional arguments passed to func after the series value.\n **kwds\n Additional keyword arguments passed to func.\n\n Returns\n -------\n Series or DataFrame\n If func returns a Series object the result will be a DataFrame.\n\n See Also\n --------\n Series.map: For element-wise operations.\n Series.agg: Only perform aggregating type operations.\n Series.transform: Only perform transforming type operations.\n\n Examples\n --------\n Create a series with typical summer temperatures for each city.\n\n >>> s = pd.Series([20, 21, 12],\n ... index=['London', 'New York', 'Helsinki'])\n >>> s\n London 20\n New York 21\n Helsinki 12\n dtype: int64\n\n Square the values by defining a function and passing it as an\n argument to ``apply()``.\n\n >>> def square(x):\n ... return x ** 2\n >>> s.apply(square)\n London 400\n New York 441\n Helsinki 144\n dtype: int64\n\n Square the values by passing an anonymous function as an\n argument to ``apply()``.\n\n >>> s.apply(lambda x: x ** 2)\n London 400\n New York 441\n Helsinki 144\n dtype: int64\n\n Define a custom function that needs additional positional\n arguments and pass these additional arguments using the\n ``args`` keyword.\n\n >>> def subtract_custom_value(x, custom_value):\n ... return x - custom_value\n\n >>> s.apply(subtract_custom_value, args=(5,))\n London 15\n New York 16\n Helsinki 7\n dtype: int64\n\n Define a custom function that takes keyword arguments\n and pass these arguments to ``apply``.\n\n >>> def add_custom_values(x, **kwargs):\n ... for month in kwargs:\n ... x += kwargs[month]\n ... 
return x\n\n >>> s.apply(add_custom_values, june=30, july=20, august=25)\n London 95\n New York 96\n Helsinki 87\n dtype: int64\n\n Use a function from the Numpy library.\n\n >>> s.apply(np.log)\n London 2.995732\n New York 3.044522\n Helsinki 2.484907\n dtype: float64\n \"\"\"\n if len(self) == 0:\n return self._constructor(dtype=self.dtype, index=self.index).__finalize__(\n self\n )\n\n # dispatch to agg\n if isinstance(func, (list, dict)):\n return self.aggregate(func, *args, **kwds)\n\n # if we are a string, try to dispatch\n if isinstance(func, str):\n return self._try_aggregate_string_function(func, *args, **kwds)\n\n # handle ufuncs and lambdas\n if kwds or args and not isinstance(func, np.ufunc):\n\n def f(x):\n return func(x, *args, **kwds)\n\n else:\n f = func\n\n with np.errstate(all=\"ignore\"):\n if isinstance(f, np.ufunc):\n return f(self)\n\n # row-wise access\n if is_extension_array_dtype(self.dtype) and hasattr(self._values, \"map\"):\n # GH#23179 some EAs do not have `map`\n mapped = self._values.map(f)\n else:\n values = self.astype(object)._values\n mapped = lib.map_infer(values, f, convert=convert_dtype)\n\n if len(mapped) and isinstance(mapped[0], Series):\n # GH 25959 use pd.array instead of tolist\n # so extension arrays can be used\n return self._constructor_expanddim(pd.array(mapped), index=self.index)\n else:\n return self._constructor(mapped, index=self.index).__finalize__(self)\n\n def _reduce(\n self, op, name, axis=0, skipna=True, numeric_only=None, filter_type=None, **kwds\n ):\n \"\"\"\n Perform a reduction operation.\n\n If we have an ndarray as a value, then simply perform the operation,\n otherwise delegate to the object.\n \"\"\"\n delegate = self._values\n\n if axis is not None:\n self._get_axis_number(axis)\n\n if isinstance(delegate, ExtensionArray):\n # dispatch to ExtensionArray interface\n return delegate._reduce(name, skipna=skipna, **kwds)\n\n else:\n # dispatch to numpy arrays\n if numeric_only:\n raise NotImplementedError(\n f\"Series.{name} does not implement numeric_only.\"\n )\n with np.errstate(all=\"ignore\"):\n return op(delegate, skipna=skipna, **kwds)\n\n def _reindex_indexer(self, new_index, indexer, copy):\n if indexer is None:\n if copy:\n return self.copy()\n return self\n\n new_values = algorithms.take_1d(\n self._values, indexer, allow_fill=True, fill_value=None\n )\n return self._constructor(new_values, index=new_index)\n\n def _needs_reindex_multi(self, axes, method, level):\n \"\"\"\n Check if we do need a multi reindex; this is for compat with\n higher dims.\n \"\"\"\n return False\n\n @Appender(generic._shared_docs[\"align\"] % _shared_doc_kwargs)\n def align(\n self,\n other,\n join=\"outer\",\n axis=None,\n level=None,\n copy=True,\n fill_value=None,\n method=None,\n limit=None,\n fill_axis=0,\n broadcast_axis=None,\n ):\n return super().align(\n other,\n join=join,\n axis=axis,\n level=level,\n copy=copy,\n fill_value=fill_value,\n method=method,\n limit=limit,\n fill_axis=fill_axis,\n broadcast_axis=broadcast_axis,\n )\n\n def rename(\n self,\n index=None,\n *,\n axis=None,\n copy=True,\n inplace=False,\n level=None,\n errors=\"ignore\",\n ):\n \"\"\"\n Alter Series index labels or name.\n\n Function / dict values must be unique (1-to-1). Labels not contained in\n a dict / Series will be left as-is. 
Extra labels listed don't throw an\n error.\n\n Alternatively, change ``Series.name`` with a scalar value.\n\n See the :ref:`user guide <basics.rename>` for more.\n\n Parameters\n ----------\n axis : {0 or \"index\"}\n Unused. Accepted for compatibility with DataFrame method only.\n index : scalar, hashable sequence, dict-like or function, optional\n Functions or dict-like are transformations to apply to\n the index.\n Scalar or hashable sequence-like will alter the ``Series.name``\n attribute.\n\n **kwargs\n Additional keyword arguments passed to the function. Only the\n \"inplace\" keyword is used.\n\n Returns\n -------\n Series\n Series with index labels or name altered.\n\n See Also\n --------\n DataFrame.rename : Corresponding DataFrame method.\n Series.rename_axis : Set the name of the axis.\n\n Examples\n --------\n >>> s = pd.Series([1, 2, 3])\n >>> s\n 0 1\n 1 2\n 2 3\n dtype: int64\n >>> s.rename(\"my_name\") # scalar, changes Series.name\n 0 1\n 1 2\n 2 3\n Name: my_name, dtype: int64\n >>> s.rename(lambda x: x ** 2) # function, changes labels\n 0 1\n 1 2\n 4 3\n dtype: int64\n >>> s.rename({1: 3, 2: 5}) # mapping, changes labels\n 0 1\n 3 2\n 5 3\n dtype: int64\n \"\"\"\n if callable(index) or is_dict_like(index):\n return super().rename(\n index, copy=copy, inplace=inplace, level=level, errors=errors\n )\n else:\n return self._set_name(index, inplace=inplace)\n\n @Appender(\n \"\"\"\n Examples\n --------\n >>> s = pd.Series([1, 2, 3])\n >>> s\n 0 1\n 1 2\n 2 3\n dtype: int64\n\n >>> s.set_axis(['a', 'b', 'c'], axis=0)\n a 1\n b 2\n c 3\n dtype: int64\n \"\"\"\n )\n @Substitution(\n **_shared_doc_kwargs,\n extended_summary_sub=\"\",\n axis_description_sub=\"\",\n see_also_sub=\"\",\n )\n @Appender(generic.NDFrame.set_axis.__doc__)\n def set_axis(self, labels, axis: Axis = 0, inplace: bool = False):\n return super().set_axis(labels, axis=axis, inplace=inplace)\n\n @Substitution(**_shared_doc_kwargs)\n @Appender(generic.NDFrame.reindex.__doc__)\n def reindex(self, index=None, **kwargs):\n return super().reindex(index=index, **kwargs)\n\n def drop(\n self,\n labels=None,\n axis=0,\n index=None,\n columns=None,\n level=None,\n inplace=False,\n errors=\"raise\",\n ) -> \"Series\":\n \"\"\"\n Return Series with specified index labels removed.\n\n Remove elements of a Series based on specifying the index labels.\n When using a multi-index, labels on different levels can be removed\n by specifying the level.\n\n Parameters\n ----------\n labels : single label or list-like\n Index labels to drop.\n axis : 0, default 0\n Redundant for application on Series.\n index : single label or list-like\n Redundant for application on Series, but 'index' can be used instead\n of 'labels'.\n\n .. versionadded:: 0.21.0\n columns : single label or list-like\n No change is made to the Series; use 'index' or 'labels' instead.\n\n .. 
versionadded:: 0.21.0\n level : int or level name, optional\n For MultiIndex, level for which the labels will be removed.\n inplace : bool, default False\n If True, do operation inplace and return None.\n errors : {'ignore', 'raise'}, default 'raise'\n If 'ignore', suppress error and only existing labels are dropped.\n\n Returns\n -------\n Series\n Series with specified index labels removed.\n\n Raises\n ------\n KeyError\n If none of the labels are found in the index.\n\n See Also\n --------\n Series.reindex : Return only specified index labels of Series.\n Series.dropna : Return series without null values.\n Series.drop_duplicates : Return Series with duplicate values removed.\n DataFrame.drop : Drop specified labels from rows or columns.\n\n Examples\n --------\n >>> s = pd.Series(data=np.arange(3), index=['A', 'B', 'C'])\n >>> s\n A 0\n B 1\n C 2\n dtype: int64\n\n Drop labels B and C\n\n >>> s.drop(labels=['B', 'C'])\n A 0\n dtype: int64\n\n Drop 2nd level label in MultiIndex Series\n\n >>> midx = pd.MultiIndex(levels=[['lama', 'cow', 'falcon'],\n ... ['speed', 'weight', 'length']],\n ... codes=[[0, 0, 0, 1, 1, 1, 2, 2, 2],\n ... [0, 1, 2, 0, 1, 2, 0, 1, 2]])\n >>> s = pd.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3],\n ... index=midx)\n >>> s\n lama speed 45.0\n weight 200.0\n length 1.2\n cow speed 30.0\n weight 250.0\n length 1.5\n falcon speed 320.0\n weight 1.0\n length 0.3\n dtype: float64\n\n >>> s.drop(labels='weight', level=1)\n lama speed 45.0\n length 1.2\n cow speed 30.0\n length 1.5\n falcon speed 320.0\n length 0.3\n dtype: float64\n \"\"\"\n return super().drop(\n labels=labels,\n axis=axis,\n index=index,\n columns=columns,\n level=level,\n inplace=inplace,\n errors=errors,\n )\n\n @doc(NDFrame.fillna, **_shared_doc_kwargs)\n def fillna(\n self,\n value=None,\n method=None,\n axis=None,\n inplace=False,\n limit=None,\n downcast=None,\n ) -> Optional[\"Series\"]:\n return super().fillna(\n value=value,\n method=method,\n axis=axis,\n inplace=inplace,\n limit=limit,\n downcast=downcast,\n )\n\n @Appender(generic._shared_docs[\"replace\"] % _shared_doc_kwargs)\n def replace(\n self,\n to_replace=None,\n value=None,\n inplace=False,\n limit=None,\n regex=False,\n method=\"pad\",\n ):\n return super().replace(\n to_replace=to_replace,\n value=value,\n inplace=inplace,\n limit=limit,\n regex=regex,\n method=method,\n )\n\n @Appender(generic._shared_docs[\"shift\"] % _shared_doc_kwargs)\n def shift(self, periods=1, freq=None, axis=0, fill_value=None) -> \"Series\":\n return super().shift(\n periods=periods, freq=freq, axis=axis, fill_value=fill_value\n )\n\n def memory_usage(self, index=True, deep=False):\n \"\"\"\n Return the memory usage of the Series.\n\n The memory usage can optionally include the contribution of\n the index and of elements of `object` dtype.\n\n Parameters\n ----------\n index : bool, default True\n Specifies whether to include the memory usage of the Series index.\n deep : bool, default False\n If True, introspect the data deeply by interrogating\n `object` dtypes for system-level memory consumption, and include\n it in the returned value.\n\n Returns\n -------\n int\n Bytes of memory consumed.\n\n See Also\n --------\n numpy.ndarray.nbytes : Total bytes consumed by the elements of the\n array.\n DataFrame.memory_usage : Bytes consumed by a DataFrame.\n\n Examples\n --------\n >>> s = pd.Series(range(3))\n >>> s.memory_usage()\n 152\n\n Not including the index gives the size of the rest of the data, which\n is necessarily smaller:\n\n >>> 
s.memory_usage(index=False)\n 24\n\n The memory footprint of `object` values is ignored by default:\n\n >>> s = pd.Series([\"a\", \"b\"])\n >>> s.values\n array(['a', 'b'], dtype=object)\n >>> s.memory_usage()\n 144\n >>> s.memory_usage(deep=True)\n 260\n \"\"\"\n v = super().memory_usage(deep=deep)\n if index:\n v += self.index.memory_usage(deep=deep)\n return v\n\n def isin(self, values) -> \"Series\":\n \"\"\"\n Check whether `values` are contained in Series.\n\n Return a boolean Series showing whether each element in the Series\n matches an element in the passed sequence of `values` exactly.\n\n Parameters\n ----------\n values : set or list-like\n The sequence of values to test. Passing in a single string will\n raise a ``TypeError``. Instead, turn a single string into a\n list of one element.\n\n Returns\n -------\n Series\n Series of booleans indicating if each element is in values.\n\n Raises\n ------\n TypeError\n * If `values` is a string\n\n See Also\n --------\n DataFrame.isin : Equivalent method on DataFrame.\n\n Examples\n --------\n >>> s = pd.Series(['lama', 'cow', 'lama', 'beetle', 'lama',\n ... 'hippo'], name='animal')\n >>> s.isin(['cow', 'lama'])\n 0 True\n 1 True\n 2 True\n 3 False\n 4 True\n 5 False\n Name: animal, dtype: bool\n\n Passing a single string as ``s.isin('lama')`` will raise an error. Use\n a list of one element instead:\n\n >>> s.isin(['lama'])\n 0 True\n 1 False\n 2 True\n 3 False\n 4 True\n 5 False\n Name: animal, dtype: bool\n \"\"\"\n result = algorithms.isin(self, values)\n return self._constructor(result, index=self.index).__finalize__(self)\n\n def between(self, left, right, inclusive=True) -> \"Series\":\n \"\"\"\n Return boolean Series equivalent to left <= series <= right.\n\n This function returns a boolean vector containing `True` wherever the\n corresponding Series element is between the boundary values `left` and\n `right`. 
NA values are treated as `False`.\n\n Parameters\n ----------\n left : scalar or list-like\n Left boundary.\n right : scalar or list-like\n Right boundary.\n inclusive : bool, default True\n Include boundaries.\n\n Returns\n -------\n Series\n Series representing whether each element is between left and\n right (inclusive).\n\n See Also\n --------\n Series.gt : Greater than of series and other.\n Series.lt : Less than of series and other.\n\n Notes\n -----\n This function is equivalent to ``(left <= ser) & (ser <= right)``\n\n Examples\n --------\n >>> s = pd.Series([2, 0, 4, 8, np.nan])\n\n Boundary values are included by default:\n\n >>> s.between(1, 4)\n 0 True\n 1 False\n 2 True\n 3 False\n 4 False\n dtype: bool\n\n With `inclusive` set to ``False`` boundary values are excluded:\n\n >>> s.between(1, 4, inclusive=False)\n 0 True\n 1 False\n 2 False\n 3 False\n 4 False\n dtype: bool\n\n `left` and `right` can be any scalar value:\n\n >>> s = pd.Series(['Alice', 'Bob', 'Carol', 'Eve'])\n >>> s.between('Anna', 'Daniel')\n 0 False\n 1 True\n 2 True\n 3 False\n dtype: bool\n \"\"\"\n if inclusive:\n lmask = self >= left\n rmask = self <= right\n else:\n lmask = self > left\n rmask = self < right\n\n return lmask & rmask\n\n # ----------------------------------------------------------------------\n # Convert to types that support pd.NA\n\n def _convert_dtypes(\n self,\n infer_objects: bool = True,\n convert_string: bool = True,\n convert_integer: bool = True,\n convert_boolean: bool = True,\n ) -> \"Series\":\n input_series = self\n if infer_objects:\n input_series = input_series.infer_objects()\n if is_object_dtype(input_series):\n input_series = input_series.copy()\n\n if convert_string or convert_integer or convert_boolean:\n inferred_dtype = convert_dtypes(\n input_series._values, convert_string, convert_integer, convert_boolean\n )\n try:\n result = input_series.astype(inferred_dtype)\n except TypeError:\n result = input_series.copy()\n else:\n result = input_series.copy()\n return result\n\n @Appender(generic._shared_docs[\"isna\"] % _shared_doc_kwargs)\n def isna(self) -> \"Series\":\n return super().isna()\n\n @Appender(generic._shared_docs[\"isna\"] % _shared_doc_kwargs)\n def isnull(self) -> \"Series\":\n return super().isnull()\n\n @Appender(generic._shared_docs[\"notna\"] % _shared_doc_kwargs)\n def notna(self) -> \"Series\":\n return super().notna()\n\n @Appender(generic._shared_docs[\"notna\"] % _shared_doc_kwargs)\n def notnull(self) -> \"Series\":\n return super().notnull()\n\n def dropna(self, axis=0, inplace=False, how=None):\n \"\"\"\n Return a new Series with missing values removed.\n\n See the :ref:`User Guide <missing_data>` for more on which values are\n considered missing, and how to work with missing data.\n\n Parameters\n ----------\n axis : {0 or 'index'}, default 0\n There is only one axis to drop values from.\n inplace : bool, default False\n If True, do operation inplace and return None.\n how : str, optional\n Not in use. 
Kept for compatibility.\n\n Returns\n -------\n Series\n Series with NA entries dropped from it.\n\n See Also\n --------\n Series.isna: Indicate missing values.\n Series.notna : Indicate existing (non-missing) values.\n Series.fillna : Replace missing values.\n DataFrame.dropna : Drop rows or columns which contain NA values.\n Index.dropna : Drop missing indices.\n\n Examples\n --------\n >>> ser = pd.Series([1., 2., np.nan])\n >>> ser\n 0 1.0\n 1 2.0\n 2 NaN\n dtype: float64\n\n Drop NA values from a Series.\n\n >>> ser.dropna()\n 0 1.0\n 1 2.0\n dtype: float64\n\n Keep the Series with valid entries in the same variable.\n\n >>> ser.dropna(inplace=True)\n >>> ser\n 0 1.0\n 1 2.0\n dtype: float64\n\n Empty strings are not considered NA values. ``None`` is considered an\n NA value.\n\n >>> ser = pd.Series([np.NaN, 2, pd.NaT, '', None, 'I stay'])\n >>> ser\n 0 NaN\n 1 2\n 2 NaT\n 3\n 4 None\n 5 I stay\n dtype: object\n >>> ser.dropna()\n 1 2\n 3\n 5 I stay\n dtype: object\n \"\"\"\n inplace = validate_bool_kwarg(inplace, \"inplace\")\n # Validate the axis parameter\n self._get_axis_number(axis or 0)\n\n if self._can_hold_na:\n result = remove_na_arraylike(self)\n if inplace:\n self._update_inplace(result)\n else:\n return result\n else:\n if inplace:\n # do nothing\n pass\n else:\n return self.copy()\n\n # ----------------------------------------------------------------------\n # Time series-oriented methods\n\n def to_timestamp(self, freq=None, how=\"start\", copy=True) -> \"Series\":\n \"\"\"\n Cast to DatetimeIndex of Timestamps, at *beginning* of period.\n\n Parameters\n ----------\n freq : str, default frequency of PeriodIndex\n Desired frequency.\n how : {'s', 'e', 'start', 'end'}\n Convention for converting period to timestamp; start of period\n vs. 
end.\n copy : bool, default True\n Whether or not to return a copy.\n\n Returns\n -------\n Series with DatetimeIndex\n \"\"\"\n new_values = self._values\n if copy:\n new_values = new_values.copy()\n\n assert isinstance(self.index, (ABCDatetimeIndex, ABCPeriodIndex))\n new_index = self.index.to_timestamp(freq=freq, how=how)\n return self._constructor(new_values, index=new_index).__finalize__(self)\n\n def to_period(self, freq=None, copy=True) -> \"Series\":\n \"\"\"\n Convert Series from DatetimeIndex to PeriodIndex with desired\n frequency (inferred from index if not passed).\n\n Parameters\n ----------\n freq : str, default None\n Frequency associated with the PeriodIndex.\n copy : bool, default True\n Whether or not to return a copy.\n\n Returns\n -------\n Series\n Series with index converted to PeriodIndex.\n \"\"\"\n new_values = self._values\n if copy:\n new_values = new_values.copy()\n\n assert isinstance(self.index, ABCDatetimeIndex)\n new_index = self.index.to_period(freq=freq)\n return self._constructor(new_values, index=new_index).__finalize__(self)\n\n # ----------------------------------------------------------------------\n # Add index\n _AXIS_ORDERS = [\"index\"]\n _AXIS_NUMBERS = {\"index\": 0}\n _AXIS_NAMES = {0: \"index\"}\n _AXIS_REVERSED = False\n _AXIS_LEN = len(_AXIS_ORDERS)\n _info_axis_number = 0\n _info_axis_name = \"index\"\n\n index: \"Index\" = properties.AxisProperty(\n axis=0, doc=\"The index (axis labels) of the Series.\"\n )\n\n # ----------------------------------------------------------------------\n # Accessor Methods\n # ----------------------------------------------------------------------\n str = CachedAccessor(\"str\", StringMethods)\n dt = CachedAccessor(\"dt\", CombinedDatetimelikeProperties)\n cat = CachedAccessor(\"cat\", CategoricalAccessor)\n plot = CachedAccessor(\"plot\", pandas.plotting.PlotAccessor)\n sparse = CachedAccessor(\"sparse\", SparseAccessor)\n\n # ----------------------------------------------------------------------\n # Add plotting methods to Series\n hist = pandas.plotting.hist_series\n\n\nSeries._add_numeric_operations()\nSeries._add_series_or_dataframe_operations()\n\n# Add arithmetic!\nops.add_flex_arithmetic_methods(Series)\nops.add_special_arithmetic_methods(Series)\n" ]
[ [ "pandas.core.ops.add_flex_arithmetic_methods", "pandas.core.common.asarray_tuplesafe", "pandas.core.construction.extract_array", "pandas.core.ops.get_op_result_name", "pandas.core.common.any_none", "pandas.core.algorithms.mode", "pandas.compat.numpy.function.validate_argmin_with_skipna", "pandas._libs.lib.infer_dtype", "pandas.core.construction.create_series_with_explicit_dtype", "pandas.core.common.maybe_iterable_to_list", "pandas.core.dtypes.common.is_bool", "pandas.core.sorting.nargsort", "pandas.util._decorators.doc", "pandas.core.nanops.nancov", "pandas.core.indexing.check_bool_indexer", "numpy.errstate", "pandas.core.accessor.CachedAccessor", "pandas._config.get_option", "numpy.dot", "pandas.core.indexes.api.ensure_index", "pandas._libs.lib.is_scalar", "pandas.core.ops._construct_result", "pandas.core.dtypes.missing.isna", "pandas.core.common.standardize_mapping", "pandas.core.algorithms.SelectNSeries", "pandas.core.common.is_bool_indexer", "numpy.array", "pandas.core.algorithms.take_1d", "numpy.argsort", "pandas.core.dtypes.common.is_iterator", "pandas.core.indexes.datetimes.DatetimeIndex", "pandas.core.reshape.reshape.unstack", "pandas.core.dtypes.cast.convert_dtypes", "pandas.io.formats.format.SeriesFormatter", "pandas.core.dtypes.common.ensure_platform_int", "pandas.core.algorithms.searchsorted", "pandas.core.common.apply_if_callable", "pandas.core.dtypes.common.is_dict_like", "pandas.core.dtypes.cast.validate_numeric_casting", "pandas.core.ops.maybe_dispatch_ufunc_to_dunder_op", "pandas.core.nanops.nanargmax", "pandas.core.nanops.nancorr", "numpy.transpose", "pandas.core.dtypes.common.is_object_dtype", "pandas.util._decorators.Substitution", "pandas.core.dtypes.common.is_scalar", "pandas.array", "pandas.core.dtypes.common.is_integer", "pandas.core.algorithms.isin", "pandas.core.dtypes.common.is_list_like", "pandas._libs.lib.map_infer", "numpy.asarray", "pandas.core.dtypes.missing.remove_na_arraylike", "pandas.core.indexes.api.Float64Index", "pandas.core.groupby.generic.SeriesGroupBy", "pandas.core.indexers.unpack_1tuple", "pandas.core.dtypes.inference.is_hashable", "pandas.core.dtypes.missing.na_value_for_dtype", "pandas.core.dtypes.missing.notna", "pandas.core.dtypes.common.is_extension_array_dtype", "pandas.core.generic.NDFrame.__init__", "pandas.util._validators.validate_bool_kwarg", "pandas.core.tools.datetimes.to_datetime", "pandas.util._decorators.Appender", "pandas.core.ops.add_special_arithmetic_methods", "pandas.core.construction.sanitize_array", "pandas.core.ops.fill_binop", "pandas._libs.properties.AxisProperty", "pandas.core.dtypes.common._is_unorderable_exception", "pandas.core.algorithms.diff", "pandas.core.internals.SingleBlockManager.from_array", "pandas.core.construction.is_empty_data", "pandas.util._validators.validate_percentile", "pandas.compat.numpy.function.validate_round", "pandas.core.nanops.nanargmin", "pandas.compat.numpy.function.validate_argmax_with_skipna", "pandas.core.dtypes.common.is_categorical_dtype", "pandas.core.reshape.concat.concat" ] ]
neurospin/deep_folding
[ "4c580314dfa4ae87c5d115f211c42185ae183f64" ]
[ "deep_folding/anatomist_tools/dataset_gen_pipe.py" ]
[ "#!python\n# -*- coding: utf-8 -*-\n#\n# This software and supporting documentation are distributed by\n# Institut Federatif de Recherche 49\n# CEA/NeuroSpin, Batiment 145,\n# 91191 Gif-sur-Yvette cedex\n# France\n#\n# This software is governed by the CeCILL license version 2 under\n# French law and abiding by the rules of distribution of free software.\n# You can use, modify and/or redistribute the software under the\n# terms of the CeCILL license version 2 as circulated by CEA, CNRS\n# and INRIA at the following URL \"http://www.cecill.info\".\n#\n# As a counterpart to the access to the source code and rights to copy,\n# modify and redistribute granted by the license, users are provided only\n# with a limited warranty and the software's author, the holder of the\n# economic rights, and the successive licensors have only limited\n# liability.\n#\n# In this respect, the user's attention is drawn to the risks associated\n# with loading, using, modifying and/or developing or reproducing the\n# software by the user in light of its specific status of free software,\n# that may mean that it is complicated to manipulate, and that also\n# therefore means that it is reserved for developers and experienced\n# professionals having in-depth computer knowledge. Users are therefore\n# encouraged to load and test the software's suitability as regards their\n# requirements in conditions enabling the security of their systems and/or\n# data to be ensured and, more generally, to use and operate it in the\n# same conditions as regards security.\n#\n# The fact that you are presently reading this means that you have had\n# knowledge of the CeCILL license version 2 and that you accept its terms.\n\n\"\"\"Creating pickle file from T1 MRI datas\n\nThe aim of this script is to create dataset of cropped skeletons from MRIs\nsaved in a .pickle file.\nSeveral steps are required: normalization, crop and .pickle generation\n\n Typical usage\n -------------\n You can use this program by first entering in the brainvisa environment\n (here brainvisa 5.0.0 installed with singurity) and launching the script\n from the terminal:\n >>> bv bash\n >>> python dataset_gen_pipe.py\n\n Alternatively, you can launch the script in the interactive terminal ipython:\n >>> %run dataset_gen_pipe.py\n\n\"\"\"\n\nimport argparse\nimport sys\nimport os\nfrom os import listdir\nfrom os.path import join\nimport tempfile\nimport re\n\nimport numpy as np\nimport scipy.ndimage\n\nimport six\n\nfrom soma import aims\n\nfrom pqdm.processes import pqdm\nfrom joblib import cpu_count\n\nfrom deep_folding.anatomist_tools.utils.logs import LogJson\nfrom deep_folding.anatomist_tools.utils.bbox import compute_max_box\nfrom deep_folding.anatomist_tools.utils.mask import compute_simple_mask, compute_centered_mask\nfrom deep_folding.anatomist_tools.utils.resample import resample\nfrom deep_folding.anatomist_tools.utils import remove_hull\nfrom deep_folding.anatomist_tools.utils.sulcus_side import complete_sulci_name\nfrom deep_folding.anatomist_tools.load_data import fetch_data\n\nfrom tqdm import tqdm\n\n_ALL_SUBJECTS = -1\n\n_SIDE_DEFAULT = 'L' # hemisphere 'L' or 'R'\n\n_INTERP_DEFAULT = 'nearest' # default interpolation for ApplyAimsTransform\n\n_RESAMPLING_DEFAULT = None # if None, resampling method is AimsApplyTransform\n\n_CROPPING_DEFAULT = 'bbox' # crops over a bounding box by default\n\n_OUT_VOXEL_SIZE = (1, 1, 1) # default output voxel size\n\n_EXTERNAL = 11 # topological value meaning \"outside the brain\"\n\n# sulcus to encompass:\n# its 
name depends on the hemisphere side\n_SULCUS_DEFAULT = 'S.T.s.ter.asc.ant.'\n\n_COMBINE_TYPE = False\n\n# Input directories\n# -----------------\n\n# Input directory containing the morphologist analysis of the HCP database\n_SRC_DIR_DEFAULT = '/neurospin/hcp'\n\n# Directory where subjects to be processed are stored.\n# Default is for HCP dataset\n_MORPHOLOGIST_DIR_DEFAULT = 'ANALYSIS/3T_morphologist'\n\n# Directory containing bounding box json files\n# default corresponds to bounding boxes computed for voxels of 1mm\n_BBOX_DIR_DEFAULT = '/neurospin/dico/data/deep_folding/current/bbox'\n\n# Directory containing mask files\n_MASK_DIR_DEFAULT = '/neurospin/dico/data/deep_folding/current/mask'\n\n# Target (output) directory\n# -------------------------\n_TGT_DIR_DEFAULT = '/neurospin/dico/data/deep_folding/test'\n\n# temporary directory\ntemp_dir = tempfile.mkdtemp()\n\ndef define_njobs():\n \"\"\"Returns number of cpus used by main loop\n \"\"\"\n nb_cpus = cpu_count()\n return max(nb_cpus-2, 1)\n\nclass DatasetCroppedSkeleton:\n \"\"\"Generates cropped skeleton files and corresponding pickle file\n \"\"\"\n\n def __init__(self, src_dir=_SRC_DIR_DEFAULT,\n tgt_dir=_TGT_DIR_DEFAULT,\n bbox_dir=_BBOX_DIR_DEFAULT,\n mask_dir=_MASK_DIR_DEFAULT,\n morphologist_dir=_MORPHOLOGIST_DIR_DEFAULT,\n list_sulci=_SULCUS_DEFAULT,\n side=_SIDE_DEFAULT,\n interp=_INTERP_DEFAULT,\n resampling=_RESAMPLING_DEFAULT,\n cropping=_CROPPING_DEFAULT,\n out_voxel_size=_OUT_VOXEL_SIZE,\n combine_type=_COMBINE_TYPE):\n \"\"\"Inits with list of directories and list of sulci\n\n Args:\n src_dir: list of strings naming full path source directories,\n containing MRI images\n tgt_dir: name of target (output) directory with full path\n bbox_dir: directory containing bbox json files\n (generated using bounding_box.py)\n mask_dir: directory containing mask files\n morphologist_dir: directory where subjects to be processed are stored\n list_sulci: list of sulcus names\n side: hemisphere side (either L for left, or R for right hemisphere)\n interp: string giving interpolation for AimsApplyTransform\n resampling: resampling method ('s[ulcus]'-based, or None to use AimsApplyTransform)\n cropping: cropping method, either 'bbox' or 'mask'\n out_voxel_size: tuple giving the output voxel size\n combine_type: whether to use a specific combination of masks\n \"\"\"\n\n self.src_dir = src_dir\n self.side = side\n # Transforms sulcus into a list of sulci\n self.list_sulci = ([list_sulci] if isinstance(list_sulci, str)\n else list_sulci)\n self.list_sulci = complete_sulci_name(self.list_sulci, self.side)\n self.tgt_dir = tgt_dir\n self.bbox_dir = bbox_dir\n self.mask_dir = mask_dir\n self.morphologist_dir = morphologist_dir\n self.interp = interp\n self.resampling = resampling\n self.cropping = cropping\n self.out_voxel_size = out_voxel_size\n self.combine_type = combine_type\n\n # Morphologist directory\n self.morphologist_dir = join(self.src_dir, self.morphologist_dir)\n ## for Tissier (overrides the HCP layout above)\n self.morphologist_dir = join(self.src_dir)\n # default acquisition subdirectory\n self.acquisition_dir = \"%(subject)s/t1mri/default_acquisition\"\n\n # Directory where to store cropped files\n self.cropped_dir = join(self.tgt_dir, self.side + 'crops')\n\n # Names of files in function of dictionary: keys -> 'subject' and 'side'\n # Files from morphologist pipeline\n # self.skeleton_file = 'default_analysis/segmentation/' \\\n # '%(side)sskeleton_%(subject)s.nii.gz'\n ## FOR HCP dataset\n #self.skeleton_file = '/neurospin/dico/data/deep_folding/datasets/hcp/' \\\n # '%(side)sskeleton_%(subject)s_generated.nii.gz'\n ## FOR TISSIER dataset\n self.skeleton_file = '/neurospin/dico/data/deep_folding/datasets/ACC_patterns/tissier/' \\\n 
'%(side)sskeleton_%(subject)s_generated.nii.gz'\n #self.graph_file = 'default_analysis/folds/3.1/default_session_auto/' \\\n # '%(side)s%(subject)s_default_session_auto.arg'\n ## FOR TISSIER dataset\n self.graph_file = 'default_analysis/folds/3.1/default_session_manual/' \\\n '%(side)s%(subject)s_default_session_manual.arg'\n\n # Names of files in function of dictionary: keys -> 'subject' and 'side'\n self.cropped_file = '%(subject)s_normalized.nii.gz'\n\n # Initialization of bounding box coordinates\n self.bbmin = np.zeros(3)\n self.bbmax = np.zeros(3)\n\n # Creates json log class\n json_file = join(self.tgt_dir, self.side + 'dataset.json')\n self.json = LogJson(json_file)\n\n # reference file in MNI template with correct voxel size\n self.ref_file = f\"{temp_dir}/file_ref.nii.gz\"\n self.g_to_icbm_template_file = join(temp_dir, 'file_g_to_icbm_%(subject)s.trm')\n\n def define_referentials(self):\n \"\"\"Writes MNI 2009 reference file with output voxel size\n\n It will be used by AimsApplyTransform\n \"\"\"\n hdr = aims.StandardReferentials.icbm2009cTemplateHeader()\n voxel_size = np.concatenate((self.out_voxel_size, [1]))\n resampling_ratio = np.array(hdr['voxel_size']) / voxel_size\n\n orig_dim = hdr['volume_dimension']\n new_dim = list((resampling_ratio * orig_dim).astype(int))\n\n vol = aims.Volume(new_dim, dtype='S16')\n vol.copyHeaderFrom(hdr)\n vol.header()['voxel_size'] = voxel_size\n aims.write(vol, self.ref_file)\n\n def crop_bbox(self, file_cropped, verbose):\n \"\"\"Crops according to bounding box\"\"\"\n # Take the coordinates of the bounding box\n bbmin = self.bbmin\n bbmax = self.bbmax\n xmin, ymin, zmin = str(bbmin[0]), str(bbmin[1]), str(bbmin[2])\n xmax, ymax, zmax = str(bbmax[0]), str(bbmax[1]), str(bbmax[2])\n\n # Crop of the images based on bounding box\n cmd_bounding_box = ' -x ' + xmin + ' -y ' + ymin + ' -z ' + zmin + \\\n ' -X ' + xmax + ' -Y ' + ymax + ' -Z ' + zmax\n cmd_crop = 'AimsSubVolume' + \\\n ' -i ' + file_cropped + \\\n ' -o ' + file_cropped + cmd_bounding_box\n\n # Standard output from AimsSubVolume is recorded in var_output;\n # run the following command directly to print it instead:\n # os.popen(cmd_crop).read()\n if verbose:\n os.popen(cmd_crop).read()\n else:\n var_output = os.popen(cmd_crop).read()\n\n def filter_mask(self):\n \"\"\"Smooths the mask with a Gaussian filter\n \"\"\"\n arr = np.asarray(self.mask)\n arr_filter = scipy.ndimage.gaussian_filter(arr.astype(float), sigma=0.5,\n order=0, output=None, mode='reflect', truncate=4.0)\n arr[:] = (arr_filter > 0.001).astype(int)\n\n def crop_mask(self, file_cropped, verbose):\n \"\"\"Crops according to mask\"\"\"\n vol = aims.read(file_cropped)\n\n arr = np.asarray(vol)\n #remove_hull.remove_hull(arr)\n\n arr_mask = np.asarray(self.mask)\n arr[arr_mask == 0] = 0\n arr[arr == _EXTERNAL] = 0\n\n # Take the coordinates of the bounding box\n bbmin = self.bbmin\n bbmax = self.bbmax\n xmin, ymin, zmin = str(bbmin[0]), str(bbmin[1]), str(bbmin[2])\n xmax, ymax, zmax = str(bbmax[0]), str(bbmax[1]), str(bbmax[2])\n\n aims.write(vol, file_cropped)\n\n # Defines crop of the images based on bounding box\n cmd_bounding_box = ' -x ' + xmin + ' -y ' + ymin + ' -z ' + zmin + \\\n ' -X ' + xmax + ' -Y ' + ymax + ' -Z ' + zmax\n cmd_crop = 'AimsSubVolume' + \\\n ' -i ' + file_cropped + \\\n ' -o ' + file_cropped + cmd_bounding_box\n\n if verbose:\n os.popen(cmd_crop).read()\n else:\n var_output = os.popen(cmd_crop).read()\n\n def crop_one_file(self, subject_id, verbose=False):\n \"\"\"Crops one file\n\n Args:\n subject_id: string giving the 
subject ID\n \"\"\"\n\n # Identifies 'subject' in a mapping (for file and directory namings)\n subject = {'subject': subject_id, 'side': self.side}\n ## FOR TISSIER\n subject_id = re.search('([ae\\d]{5,6})', subject_id).group(0)\n\n # Names directory where subject analysis files are stored\n subject_dir = \\\n join(self.morphologist_dir, self.acquisition_dir % subject)\n\n # Skeleton file name\n file_skeleton = join(subject_dir, self.skeleton_file % {'subject': subject_id, 'side': self.side})\n\n # Creates transformation MNI template\n file_graph = join(subject_dir, self.graph_file % subject)\n graph = aims.read(file_graph)\n g_to_icbm_template = aims.GraphManip.getICBM2009cTemplateTransform(graph)\n g_to_icbm_template_file = self.g_to_icbm_template_file % subject\n aims.write(g_to_icbm_template, g_to_icbm_template_file)\n\n if os.path.exists(file_skeleton):\n # Creates output (cropped) file name\n file_cropped = join(self.cropped_dir, self.cropped_file % {'subject': subject_id, 'side': self.side})\n\n # Normalization and resampling of skeleton images\n if self.resampling:\n resampled = resample(input_image=file_skeleton,\n output_vs=self.out_voxel_size,\n transformation=g_to_icbm_template_file,\n verbose=False)\n aims.write(resampled, file_cropped)\n else:\n cmd_normalize = 'AimsApplyTransform' + \\\n ' -i ' + file_skeleton + \\\n ' -o ' + file_cropped + \\\n ' -m ' + g_to_icbm_template_file + \\\n ' -r ' + self.ref_file + \\\n ' -t ' + self.interp + \\\n ' --bg ' + str(_EXTERNAL)\n os.system(cmd_normalize)\n\n # Cropping of skeleton image\n if self.cropping == 'bbox':\n self.crop_bbox(file_cropped, verbose)\n else:\n self.crop_mask(file_cropped, verbose)\n\n\n\n def crop_files(self, number_subjects=_ALL_SUBJECTS):\n \"\"\"Crop nii files\n\n The program loops over all subjects from the input (source) directory.\n\n Args:\n number_subjects: integer giving the number of subjects to analyze,\n by default it is set to _ALL_SUBJECTS (-1).\n \"\"\"\n\n if number_subjects:\n\n # subjects are detected as the directory names under src_dir\n list_all_subjects = [dI for dI in os.listdir(self.morphologist_dir)\\\n if os.path.isdir(os.path.join(self.morphologist_dir,dI))]\n\n # Gives the possibility to list only the first number_subjects\n list_subjects = (\n list_all_subjects\n if number_subjects == _ALL_SUBJECTS\n else list_all_subjects[:number_subjects])\n\n # Creates target and cropped directory\n if not os.path.exists(self.tgt_dir):\n os.makedirs(self.tgt_dir)\n if not os.path.exists(self.cropped_dir):\n os.makedirs(self.cropped_dir)\n\n # Writes number of subjects and directory names to json file\n dict_to_add = {'nb_subjects': len(list_subjects),\n 'src_dir': self.src_dir,\n 'bbox_dir': self.bbox_dir,\n 'mask_dir': self.mask_dir,\n 'side': self.side,\n 'interp': self.interp,\n 'list_sulci': self.list_sulci,\n 'bbmin': self.bbmin.tolist(),\n 'bbmax': self.bbmax.tolist(),\n 'tgt_dir': self.tgt_dir,\n 'cropped_dir': self.cropped_dir,\n 'resampling_type': 'sulcus-based' if self.resampling else 'AimsApplyTransform',\n 'out_voxel_size': self.out_voxel_size,\n 'combine_type': self.combine_type\n }\n self.json.update(dict_to_add=dict_to_add)\n\n # Defines referential\n self.define_referentials()\n\n # Performs cropping for each file in a parallelized way\n print(list_subjects)\n\n #for sub in list_subjects:\n # self.crop_one_file(sub)\n pqdm(list_subjects, self.crop_one_file, n_jobs=define_njobs())\n\n\n def dataset_gen_pipe(self, number_subjects=_ALL_SUBJECTS):\n \"\"\"Main API to create pickle 
files\n\n The program loops over all subjects from the input (source) directory.\n\n Args:\n number_subjects: integer giving the number of subjects to analyze,\n by default it is set to _ALL_SUBJECTS (-1).\n \"\"\"\n\n self.json.write_general_info()\n\n # Computes bounding box and mask\n if number_subjects:\n if self.cropping == 'bbox':\n self.bbmin, self.bbmax = compute_max_box(sulci_list=self.list_sulci,\n side=self.side,\n talairach_box=False,\n src_dir=self.bbox_dir)\n elif self.cropping == 'mask':\n if self.combine_type:\n self.mask, self.bbmin, self.bbmax = \\\n compute_centered_mask(sulci_list=self.list_sulci,\n side=self.side,\n mask_dir=self.mask_dir)\n else:\n self.mask, self.bbmin, self.bbmax = \\\n compute_simple_mask(sulci_list=self.list_sulci,\n side=self.side,\n mask_dir=self.mask_dir)\n else:\n raise ValueError('Cropping must be either \'bbox\' or \'mask\'')\n\n # Generate cropped files\n self.crop_files(number_subjects=number_subjects)\n\n # Creation of .pickle file for all subjects\n if number_subjects:\n fetch_data(cropped_dir=self.cropped_dir,\n tgt_dir=self.tgt_dir,\n side=self.side)\n\n\ndef parse_args(argv):\n \"\"\"Function parsing command-line arguments\n\n Args:\n argv: a list containing command line arguments\n\n Returns:\n params: dictionary with keys: src_dir, tgt_dir, nb_subjects, list_sulci\n \"\"\"\n\n # Parse command line arguments\n parser = argparse.ArgumentParser(\n prog='dataset_gen_pipe.py',\n description='Generates cropped and pickle files')\n parser.add_argument(\n \"-s\", \"--src_dir\", type=str, default=_SRC_DIR_DEFAULT,\n help='Source directory where the MRI data lies. '\n 'Default is : ' + _SRC_DIR_DEFAULT)\n parser.add_argument(\n \"-t\", \"--tgt_dir\", type=str, default=_TGT_DIR_DEFAULT,\n help='Target directory where to store the cropped and pickle files. '\n 'Default is : ' + _TGT_DIR_DEFAULT)\n parser.add_argument(\n \"-a\", \"--mask_dir\", type=str, default=_MASK_DIR_DEFAULT,\n help='Masking directory where masks have been stored. '\n 'Default is : ' + _MASK_DIR_DEFAULT)\n parser.add_argument(\n \"-b\", \"--bbox_dir\", type=str, default=_BBOX_DIR_DEFAULT,\n help='Bounding box directory where json files containing '\n 'bounding box coordinates have been stored. '\n 'Default is : ' + _BBOX_DIR_DEFAULT)\n parser.add_argument(\n \"-m\", \"--morphologist_dir\", type=str, default=_MORPHOLOGIST_DIR_DEFAULT,\n help='Directory where subjects to be processed are stored')\n parser.add_argument(\n \"-u\", \"--sulcus\", type=str, default=_SULCUS_DEFAULT, nargs='+',\n help='Sulcus name around which we determine the bounding box. '\n 'If there are several sulci, add all sulci '\n 'one after the other. Example: -u sulcus_1 sulcus_2 '\n 'Default is : ' + _SULCUS_DEFAULT)\n parser.add_argument(\n \"-i\", \"--side\", type=str, default=_SIDE_DEFAULT,\n help='Hemisphere side (either L or R). Default is : ' + _SIDE_DEFAULT)\n parser.add_argument(\n \"-n\", \"--nb_subjects\", type=str, default=\"all\",\n help='Number of subjects to take into account, or \\'all\\'. '\n '0 subjects is allowed, for debug purposes. '\n 'Default is : all')\n parser.add_argument(\n \"-e\", \"--interp\", type=str, default=_INTERP_DEFAULT,\n help=\"Same interpolation type as for AimsApplyTransform. 
\"\n \"Type of interpolation used for Volumes: \"\n \"n[earest], l[inear], q[uadratic], c[cubic], quartic, \"\n \"quintic, six[thorder], seven[thorder]. \"\n \"Modes may also be specified as order number: \"\n \"0=nearest, 1=linear...\")\n parser.add_argument(\n \"-p\", \"--resampling\", type=str, default=None,\n help='Method of resampling to perform. '\n 'Type of resampling: '\n 's[ulcus] for sulcus-based method'\n 'If None, AimsApplyTransform is used.'\n 'Default is : None')\n parser.add_argument(\n \"-c\", \"--cropping\", type=str, default=None,\n help='Method of to select and crop the image. '\n 'Type of cropping: '\n 'bbox: for bounding box cropping'\n 'mask: selection based on a mask'\n 'Default is : bbox')\n parser.add_argument(\n \"-v\", \"--out_voxel_size\", type=int, nargs='+', default=_OUT_VOXEL_SIZE,\n help='Voxel size of output images'\n 'Default is : 1 1 1')\n parser.add_argument(\n \"-o\", \"--combine_type\", type=bool, default=_COMBINE_TYPE,\n help='Whether use specific combination of masks or not')\n\n params = {}\n\n args = parser.parse_args(argv)\n params['src_dir'] = args.src_dir\n params['tgt_dir'] = args.tgt_dir\n params['bbox_dir'] = args.bbox_dir\n params['mask_dir'] = args.mask_dir\n params['list_sulci'] = args.sulcus # a list of sulci\n params['side'] = args.side\n params['interp'] = args.interp\n params['resampling'] = args.resampling\n params['cropping'] = args.cropping\n params['out_voxel_size'] = tuple(args.out_voxel_size)\n params['morphologist_dir'] = args.morphologist_dir\n params['combine_type'] = args.combine_type\n\n number_subjects = args.nb_subjects\n\n # Check if nb_subjects is either the string \"all\" or a positive integer\n try:\n if number_subjects == \"all\":\n number_subjects = _ALL_SUBJECTS\n else:\n number_subjects = int(number_subjects)\n if number_subjects < 0:\n raise ValueError\n except ValueError:\n raise ValueError(\n \"number_subjects must be either the string \\\"all\\\" or an integer\")\n params['nb_subjects'] = number_subjects\n\n return params\n\n\ndef dataset_gen_pipe(src_dir=_SRC_DIR_DEFAULT,\n tgt_dir=_TGT_DIR_DEFAULT,\n bbox_dir=_BBOX_DIR_DEFAULT,\n mask_dir=_MASK_DIR_DEFAULT,\n morphologist_dir=_MORPHOLOGIST_DIR_DEFAULT,\n side=_SIDE_DEFAULT,\n list_sulci=_SULCUS_DEFAULT,\n number_subjects=_ALL_SUBJECTS,\n interp=_INTERP_DEFAULT,\n resampling=_RESAMPLING_DEFAULT,\n cropping=_CROPPING_DEFAULT,\n out_voxel_size=_OUT_VOXEL_SIZE,\n combine_type=_COMBINE_TYPE):\n \"\"\"Main program generating cropped files and corresponding pickle file\n \"\"\"\n\n dataset = DatasetCroppedSkeleton(src_dir=src_dir,\n tgt_dir=tgt_dir,\n bbox_dir=bbox_dir,\n mask_dir=mask_dir,\n morphologist_dir=morphologist_dir,\n side=side,\n list_sulci=list_sulci,\n interp=interp,\n resampling=resampling,\n cropping=cropping,\n out_voxel_size=out_voxel_size,\n combine_type=combine_type)\n dataset.dataset_gen_pipe(number_subjects=number_subjects)\n\n\ndef main(argv):\n \"\"\"Reads argument line and creates cropped files and pickle file\n\n Args:\n argv: a list containing command line arguments\n \"\"\"\n\n # This code permits to catch SystemExit with exit code 0\n # such as the one raised when \"--help\" is given as argument\n try:\n # Parsing arguments\n params = parse_args(argv)\n # Actual API\n dataset_gen_pipe(src_dir=params['src_dir'],\n tgt_dir=params['tgt_dir'],\n bbox_dir=params['bbox_dir'],\n mask_dir=params['mask_dir'],\n morphologist_dir=params['morphologist_dir'],\n side=params['side'],\n list_sulci=params['list_sulci'],\n interp=params['interp'],\n 
number_subjects=params['nb_subjects'],\n resampling=params['resampling'],\n cropping=params['cropping'],\n out_voxel_size=params['out_voxel_size'],\n combine_type=params['combine_type'])\n except SystemExit as exc:\n if exc.code != 0:\n six.reraise(*sys.exc_info())\n\n\n######################################################################\n# Main program\n######################################################################\n\nif __name__ == '__main__':\n # This permits calling main from another python program\n # without having to make system calls\n main(argv=sys.argv[1:])\n" ]
[ [ "numpy.concatenate", "numpy.array", "numpy.asarray", "numpy.zeros" ] ]
DeppMeng/HRNet-MaskRCNN-Benchmark
[ "63bc0fcf9e98137c0e07c27ee134b67f2b46004d" ]
[ "maskrcnn_benchmark/modeling/backbone/hrnet.py" ]
[ "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport torch.nn as nn\nfrom maskrcnn_benchmark.layers import FrozenBatchNorm2d\nfrom maskrcnn_benchmark.layers import Conv2d\n\n\nclass BasicBlock(nn.Module):\n expansion = 1\n\n def __init__(self, inplanes, planes, stride=1):\n super(BasicBlock, self).__init__()\n self.inplanes = inplanes\n self.planes = planes\n self.conv1 = Conv2d(\n inplanes, planes, kernel_size=3,\n stride=stride, padding=1, bias=False)\n self.bn1 = FrozenBatchNorm2d(planes)\n self.relu = nn.ReLU(inplace=True)\n self.conv2 = Conv2d(\n planes, planes, kernel_size=3,\n stride=stride, padding=1, bias=False)\n self.bn2 = FrozenBatchNorm2d(planes)\n if self.inplanes != self.planes*self.expansion:\n self.downsample = nn.Sequential(\n Conv2d(self.inplanes, self.planes * self.expansion,\n kernel_size=1, stride=stride, bias=False),\n FrozenBatchNorm2d(self.planes * self.expansion),\n )\n\n def forward(self, x):\n residual = x\n\n out = self.conv1(x)\n out = self.bn1(out)\n out = self.relu(out)\n\n out = self.conv2(out)\n out = self.bn2(out)\n\n if self.inplanes != self.planes*self.expansion:\n residual = self.downsample(x)\n\n out = out + residual\n out = self.relu(out)\n\n return out\n\n\nclass Bottleneck(nn.Module):\n expansion = 4\n\n def __init__(self, inplanes, planes, stride=1):\n super(Bottleneck, self).__init__()\n self.inplanes = inplanes\n self.planes = planes\n self.conv1 = Conv2d(inplanes, planes, kernel_size=1, bias=False)\n self.bn1 = FrozenBatchNorm2d(planes)\n self.conv2 = Conv2d(planes, planes, kernel_size=3, stride=stride,\n padding=1, bias=False)\n self.bn2 = FrozenBatchNorm2d(planes)\n self.conv3 = Conv2d(planes, planes * self.expansion, kernel_size=1,\n bias=False)\n self.bn3 = FrozenBatchNorm2d(planes * self.expansion)\n self.relu = nn.ReLU(inplace=True)\n\n if self.inplanes != self.planes*self.expansion:\n self.downsample = nn.Sequential(\n Conv2d(self.inplanes, self.planes * self.expansion,\n kernel_size=1, stride=stride, bias=False),\n FrozenBatchNorm2d(self.planes * self.expansion),\n )\n\n def forward(self, x):\n residual = x\n\n out = self.conv1(x)\n out = self.bn1(out)\n out = self.relu(out)\n\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu(out)\n\n out = self.conv3(out)\n out = self.bn3(out)\n\n if self.inplanes != self.planes*self.expansion:\n residual = self.downsample(x)\n\n out += residual\n out = self.relu(out)\n\n return out\n\n\nclass HighResolutionModule(nn.Module):\n\n def __init__(self, num_branches, blocks, num_blocks, num_inchannels,\n num_channels, fuse_method, multi_scale_output=True):\n super(HighResolutionModule, self).__init__()\n self._check_branches(\n num_branches, blocks, num_blocks, num_inchannels, num_channels)\n\n self.num_inchannels = num_inchannels\n self.fuse_method = fuse_method\n self.num_branches = num_branches\n\n self.multi_scale_output = multi_scale_output\n\n self.branches = self._make_branches(\n num_branches, blocks, num_blocks, num_channels)\n self.fuse_layers = self._make_fuse_layers()\n self.relu = nn.ReLU(True)\n\n def _check_branches(self, num_branches, blocks, num_blocks,\n num_inchannels, num_channels):\n if num_branches != len(num_blocks):\n error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(\n num_branches, len(num_blocks))\n raise ValueError(error_msg)\n\n if num_branches != len(num_channels):\n error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(\n num_branches, len(num_channels))\n raise 
ValueError(error_msg)\n\n if num_branches != len(num_inchannels):\n error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(\n num_branches, len(num_inchannels))\n raise ValueError(error_msg)\n\n def _make_one_branch(self, branch_index, block, num_blocks, num_channels,\n stride=1):\n layers = []\n layers.append(block(self.num_inchannels[branch_index],\n num_channels[branch_index], stride))\n self.num_inchannels[branch_index] = \\\n num_channels[branch_index] * block.expansion\n for i in range(1, num_blocks[branch_index]):\n layers.append(block(self.num_inchannels[branch_index],\n num_channels[branch_index]))\n\n return nn.Sequential(*layers)\n\n def _make_branches(self, num_branches, block, num_blocks, num_channels):\n branches = []\n\n for i in range(num_branches):\n branches.append(\n self._make_one_branch(i, block, num_blocks, num_channels))\n\n return nn.ModuleList(branches)\n\n def _make_fuse_layers(self):\n if self.num_branches == 1:\n return None\n\n num_branches = self.num_branches\n num_inchannels = self.num_inchannels\n fuse_layers = []\n for i in range(num_branches if self.multi_scale_output else 1):\n fuse_layer = []\n for j in range(num_branches):\n if j > i:\n fuse_layer.append(nn.Sequential(\n Conv2d(num_inchannels[j], num_inchannels[i], 1, 1, 0, bias=False),\n FrozenBatchNorm2d(num_inchannels[i]),\n nn.Upsample(scale_factor=2**(j-i), mode='nearest')))\n elif j == i:\n fuse_layer.append(None)\n else:\n conv3x3s = []\n for k in range(i-j):\n if k == i - j - 1:\n num_outchannels_conv3x3 = num_inchannels[i]\n conv3x3s.append(nn.Sequential(\n Conv2d(num_inchannels[j], num_outchannels_conv3x3,\n 3, 2, 1, bias=False),\n FrozenBatchNorm2d(num_outchannels_conv3x3)))\n else:\n num_outchannels_conv3x3 = num_inchannels[j]\n conv3x3s.append(nn.Sequential(\n Conv2d(num_inchannels[j], num_outchannels_conv3x3,\n 3, 2, 1, bias=False),\n FrozenBatchNorm2d(num_outchannels_conv3x3),\n nn.ReLU(True)))\n fuse_layer.append(nn.Sequential(*conv3x3s))\n fuse_layers.append(nn.ModuleList(fuse_layer))\n\n return nn.ModuleList(fuse_layers)\n\n def get_num_inchannels(self):\n return self.num_inchannels\n\n def forward(self, x):\n if self.num_branches == 1:\n return [self.branches[0](x[0])]\n\n for i in range(self.num_branches):\n x[i] = self.branches[i](x[i])\n\n x_fuse = []\n for i in range(len(self.fuse_layers)):\n y = x[0] if i == 0 else self.fuse_layers[i][0](x[0])\n for j in range(1, self.num_branches):\n if i == j:\n y = y + x[j]\n else:\n y = y + self.fuse_layers[i][j](x[j])\n x_fuse.append(self.relu(y))\n\n return x_fuse\n\n\nclass HRNet(nn.Module):\n def __init__(self, cfg, **kwargs):\n super(HRNet, self).__init__()\n\n blocks_dict = {\n 'BasicBlockWithFixedBatchNorm': BasicBlock,\n 'BottleneckWithFixedBatchNorm': Bottleneck\n }\n\n self.blocks_dict = blocks_dict\n self.inplanes = 64\n\n # stem net\n self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=2, padding=1,\n bias=False)\n self.bn1 = FrozenBatchNorm2d(64)\n self.conv2 = nn.Conv2d(64, 64, kernel_size=3, stride=2, padding=1,\n bias=False)\n self.bn2 = FrozenBatchNorm2d(64)\n self.relu = nn.ReLU(inplace=True)\n\n self.stage1_cfg = cfg.MODEL.HRNET.STAGE1\n num_channels = self.stage1_cfg['NUM_CHANNELS'][0]\n block = blocks_dict[self.stage1_cfg['BLOCK']]\n num_blocks = self.stage1_cfg['NUM_BLOCKS'][0]\n self.layer1 = self._make_layer(block, 64, num_channels, num_blocks)\n # stage1_out_channel = block.expansion*num_channels\n # self.layer1 = self._make_layer(Bottleneck, self.inplanes, 64, 4)\n\n self.stage2_cfg = cfg.MODEL.HRNET.STAGE2\n 
num_channels = self.stage2_cfg['NUM_CHANNELS']\n block = blocks_dict[self.stage2_cfg['BLOCK']]\n num_channels = [\n num_channels[i] * block.expansion for i in range(len(num_channels))]\n self.transition1 = self._make_transition_layer([256], num_channels)\n self.stage2, pre_stage_channels = self._make_stage(\n self.stage2_cfg, num_channels)\n\n self.stage3_cfg = cfg.MODEL.HRNET.STAGE3\n num_channels = self.stage3_cfg['NUM_CHANNELS']\n block = blocks_dict[self.stage3_cfg['BLOCK']]\n num_channels = [\n num_channels[i] * block.expansion for i in range(len(num_channels))]\n self.transition2 = self._make_transition_layer(\n pre_stage_channels, num_channels)\n self.stage3, pre_stage_channels = self._make_stage(\n self.stage3_cfg, num_channels)\n\n self.stage4_cfg = cfg.MODEL.HRNET.STAGE4\n num_channels = self.stage4_cfg['NUM_CHANNELS']\n block = blocks_dict[self.stage4_cfg['BLOCK']]\n num_channels = [\n num_channels[i] * block.expansion for i in range(len(num_channels))]\n self.transition3 = self._make_transition_layer(\n pre_stage_channels, num_channels)\n self.stage4, pre_stage_channels = self._make_stage(\n self.stage4_cfg, num_channels, multi_scale_output=self.stage4_cfg.MULTI_OUTPUT)\n\n def _make_transition_layer(\n self, num_channels_pre_layer, num_channels_cur_layer):\n num_branches_cur = len(num_channels_cur_layer)\n num_branches_pre = len(num_channels_pre_layer)\n\n transition_layers = []\n for i in range(num_branches_cur):\n if i < num_branches_pre:\n if num_channels_cur_layer[i] != num_channels_pre_layer[i]:\n transition_layers.append(nn.Sequential(\n Conv2d(num_channels_pre_layer[i],\n num_channels_cur_layer[i],\n 3,\n 1,\n 1,\n bias=False),\n FrozenBatchNorm2d(num_channels_cur_layer[i]),\n nn.ReLU(inplace=True)))\n else:\n transition_layers.append(None)\n else:\n conv3x3s = []\n for j in range(i+1-num_branches_pre):\n inchannels = num_channels_pre_layer[-1]\n outchannels = num_channels_cur_layer[i] \\\n if j == i-num_branches_pre else inchannels\n conv3x3s.append(nn.Sequential(\n Conv2d(\n inchannels, outchannels, 3, 2, 1, bias=False),\n FrozenBatchNorm2d(outchannels),\n nn.ReLU(inplace=True)))\n transition_layers.append(nn.Sequential(*conv3x3s))\n\n return nn.ModuleList(transition_layers)\n\n def _make_layer(self, block, inplanes, planes, blocks, stride=1):\n layers = []\n layers.append(block(inplanes, planes, stride))\n inplanes = planes * block.expansion\n for i in range(1, blocks):\n layers.append(block(inplanes, planes))\n\n return nn.Sequential(*layers)\n\n def _make_stage(self, layer_config, num_inchannels,\n multi_scale_output=True):\n num_modules = layer_config['NUM_MODULES']\n num_branches = layer_config['NUM_BRANCHES']\n num_blocks = layer_config['NUM_BLOCKS']\n num_channels = layer_config['NUM_CHANNELS']\n block = self.blocks_dict[layer_config['BLOCK']]\n fuse_method = layer_config['FUSE_METHOD']\n\n modules = []\n for i in range(num_modules):\n # multi_scale_output is only used last module\n if not multi_scale_output and i == num_modules - 1:\n reset_multi_scale_output = False\n else:\n reset_multi_scale_output = True\n\n modules.append(\n HighResolutionModule(num_branches,\n block,\n num_blocks,\n num_inchannels,\n num_channels,\n fuse_method,\n reset_multi_scale_output)\n )\n num_inchannels = modules[-1].get_num_inchannels()\n\n return nn.Sequential(*modules), num_inchannels\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.bn1(x)\n x = self.relu(x)\n x = self.conv2(x)\n x = self.bn2(x)\n x = self.relu(x)\n x = self.layer1(x)\n\n x_list = []\n for i in 
range(self.stage2_cfg['NUM_BRANCHES']):\n if self.transition1[i] is not None:\n x_list.append(self.transition1[i](x))\n else:\n x_list.append(x)\n y_list = self.stage2(x_list)\n\n x_list = []\n for i in range(self.stage3_cfg['NUM_BRANCHES']):\n if self.transition2[i] is not None:\n x_list.append(self.transition2[i](y_list[-1]))\n else:\n x_list.append(y_list[i])\n y_list = self.stage3(x_list)\n\n x_list = []\n for i in range(self.stage4_cfg['NUM_BRANCHES']):\n if self.transition3[i] is not None:\n x_list.append(self.transition3[i](y_list[-1]))\n else:\n x_list.append(y_list[i])\n y_list = self.stage4(x_list)\n\n return tuple(y_list)\n" ]
[ [ "torch.nn.ModuleList", "torch.nn.Sequential", "torch.nn.ReLU", "torch.nn.Upsample", "torch.nn.Conv2d" ] ]
rudyn2/tsad
[ "d606fc60c35772bc9052cf4283ea06c64bbd61d3", "d606fc60c35772bc9052cf4283ea06c64bbd61d3" ]
[ "models/ActorCritic.py", "utils/join_datasets.py" ]
[ "import numpy as np\nimport torch.nn\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom typing import Union\n\n__HLCNUMBER_TO_HLC__ = {\n 0: 'right',\n 1: 'left',\n 2: 'straight',\n 3: 'follow_lane'\n}\n\n\nclass TwoLayerMLP(nn.Module):\n\n def __init__(self, input_dim: int, hidden_size: int, output_size: int):\n super(TwoLayerMLP, self).__init__()\n self._input_dim = input_dim\n self._hidden_size = hidden_size\n self._output_size = output_size\n self.fc1 = nn.Linear(self._input_dim, self._hidden_size)\n self.fc2 = nn.Linear(self._hidden_size, self._output_size)\n self.relu = nn.ReLU()\n\n def forward(self, x):\n x = self.fc1(x)\n x = self.relu(x)\n x = self.fc2(x)\n return x\n\n\nclass ThreeLayerMLP(nn.Module):\n\n def __init__(self, input_dim: int, hidden_sizes: tuple, output_size: int):\n super(ThreeLayerMLP, self).__init__()\n self._input_dim = input_dim\n self._hidden_sizes = hidden_sizes\n self._output_size = output_size\n self.fc1 = nn.Linear(self._input_dim, self._hidden_sizes[0])\n self.fc2 = nn.Linear(self._hidden_sizes[0], self._hidden_sizes[1])\n self.fc3 = nn.Linear(self._hidden_sizes[1], self._output_size)\n self.relu = nn.ReLU()\n\n def forward(self, x):\n x = self.fc1(x)\n x = self.relu(x)\n x = self.fc2(x)\n x = self.relu(x)\n x = self.fc3(x)\n return x\n\n\nclass Actor(nn.Module):\n \"\"\"\n Output: a (3,)\n Input: s (1024x4x4)\n \"\"\"\n\n def __init__(self, input_size: int, hidden_size: int, action_dim: int = 2, output_factor: int = 2):\n super(Actor, self).__init__()\n self._device = 'cuda' if torch.cuda.is_available() else 'cpu'\n self._action_dim = action_dim\n self._hidden_size = hidden_size\n\n self.branches = torch.nn.ModuleDict({\n 'left': ThreeLayerMLP(input_size, (hidden_size, hidden_size // 2), self._action_dim * output_factor),\n 'right': ThreeLayerMLP(input_size, (hidden_size, hidden_size // 2), self._action_dim * output_factor),\n 'follow_lane': ThreeLayerMLP(input_size, (hidden_size, hidden_size // 2), self._action_dim * output_factor),\n 'straight': ThreeLayerMLP(input_size, (hidden_size, hidden_size // 2), self._action_dim * output_factor)\n })\n\n def forward(self, obs: Union[list, tuple, dict], hlc):\n\n if isinstance(obs, list) or isinstance(obs, tuple):\n # if the observation is an iterable, then this method is going to be used for TRAINING in a batch-wise\n encoding = torch.stack([torch.tensor(o['encoding'], device=self._device) for o in obs], dim=0).float()\n elif isinstance(obs, dict):\n # if the observation is a dict, then this method is going to be used for ACTION SELECTION\n encoding = torch.tensor(obs['encoding'], device=self._device).unsqueeze(0).float()\n else:\n raise ValueError(f\"Expected input of type list, tuple or dict but got: {type(obs)}\")\n\n # forward\n encoding = torch.flatten(encoding, start_dim=1)\n preds = self.branches[__HLCNUMBER_TO_HLC__[hlc]](encoding)\n return preds\n\n\nclass Critic(nn.Module):\n \"\"\"\n Output: Q(s,a): (1,)\n Input: (s, a); s: (1024x4x4); a: (3,)\n \"\"\"\n\n def __init__(self, hidden_dim: int, action_dim: int):\n super(Critic, self).__init__()\n self._input_channels = 768\n self._device = 'cuda'\n\n input_size = 15 + action_dim\n self.branches = torch.nn.ModuleDict({\n 'left': ThreeLayerMLP(input_size, (hidden_dim, hidden_dim // 2), 1),\n 'right': ThreeLayerMLP(input_size, (hidden_dim, hidden_dim // 2), 1),\n 'follow_lane': ThreeLayerMLP(input_size, (hidden_dim, hidden_dim // 2), 1),\n 'straight': ThreeLayerMLP(input_size, (hidden_dim, hidden_dim // 2), 1)\n })\n\n def forward(self, obs: 
Union[list, tuple], action: Union[list, tuple, torch.Tensor], hlc: int):\n encoding = torch.stack([torch.tensor(o['encoding'], device=self._device) for o in obs], dim=0).float()\n\n if isinstance(action, list) or isinstance(action, tuple):\n action = torch.stack([torch.tensor(a) for a in action]).to(self._device)\n \n x_action = torch.cat([encoding, action], dim=1)\n\n # forward\n preds = self.branches[__HLCNUMBER_TO_HLC__[hlc]](x_action)\n return preds\n\n\nif __name__ == '__main__':\n # DEPRECATED -->\n batch_size = 8\n sample_input = torch.rand((batch_size, 15))\n sample_speed = torch.rand((batch_size, 2))\n action = torch.tensor(np.random.random((batch_size, 3))).float()\n mse_loss = nn.MSELoss()\n\n critic = Critic(512, 3)\n actor = Actor(512, 3)\n q = critic(sample_input, sample_speed, action, \"right\")\n a = actor(sample_input, sample_speed, \"right\")\n\n expected_q = torch.rand((batch_size, 1))\n expected_a = torch.rand((batch_size, 3))\n a_loss = mse_loss(expected_a, a)\n q_loss = mse_loss(q, expected_q)\n\n q_loss.backward()\n a_loss.backward()\n # <---\n", "\"\"\"\nTool to join hdf5 datasets.\n\"\"\"\nimport h5py\nimport json\nimport argparse\nfrom hdf5_saver import HDF5Saver\nfrom json_saver import JsonSaver\nfrom glob import glob\nimport numpy as np\nfrom tqdm import tqdm\n\n\nclass Merger(object):\n\n def __init__(self, folder_path: str, output_path: str):\n self.path = folder_path\n self.metadata = {}\n self.hdf5_saver = HDF5Saver(288, 288, file_path_to_save=output_path + \".hdf5\")\n self.json_saver = JsonSaver(path=output_path + \".json\")\n self._load_metadata()\n self.total_saved = 0\n self.total_discarded = 0\n\n def _load_metadata(self):\n json_files = glob(self.path + \"*.json\")\n for json_file in json_files:\n with open(json_file, \"r\") as f:\n json_metadata = json.load(f)\n self.metadata.update(json_metadata)\n\n def merge(self):\n # find all hdf5 files in provided path\n hdf5_files = glob(self.path + \"*.hdf5\")\n print(\"The following files will be merged: \")\n for hdf5_file in hdf5_files:\n print(hdf5_file)\n for hdf5_file in hdf5_files:\n with h5py.File(hdf5_file, \"r\") as f:\n self.process_hdf5_file(f)\n\n def process_hdf5_file(self, file: h5py.File):\n \"\"\"\n Save all episodes of provided file\n \"\"\"\n for run_id in tqdm(file.keys(), \"Processing...\"):\n if run_id in self.metadata.keys():\n image_ts = list(file[run_id].keys())\n meta_ts = list(self.metadata[run_id].keys())\n if set(image_ts) == set(meta_ts):\n\n media_data = [{\n \"timestamp\": ts,\n \"rgb\": np.array(file[run_id][ts][\"rgb\"]),\n \"depth\": np.array(file[run_id][ts][\"depth\"]),\n \"semantic\": np.array(file[run_id][ts][\"semantic\"])\n } for ts in image_ts]\n info_data = [{\n \"timestamp\": ts,\n \"metadata\": self.metadata[run_id][ts]\n } for ts in meta_ts]\n\n self.hdf5_saver.save_one_ego_run(run_id, media_data, verbose=False)\n self.json_saver.save_one_ego_run(run_id=run_id, info_data=info_data)\n self.total_saved += 1\n else:\n self.total_discarded += 1\n else:\n self.total_discarded += 1\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\"Merger utility\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument('datasets_folder', default='../dataset', type=str,\n help='Path to dataset (just name, without extension')\n parser.add_argument('--output', type=str, default=\"merge\", help=\"Output path name (without extensions)\")\n args = parser.parse_args()\n i = Merger(args.datasets_folder, args.output)\n i.merge()\n" ]
[ [ "torch.nn.Linear", "numpy.random.random", "torch.nn.MSELoss", "torch.nn.ReLU" ], [ "numpy.array" ] ]
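Note on the model file in the record above: its __main__ smoke test is explicitly flagged DEPRECATED and its calls no longer match the current interfaces (Actor.forward takes (obs, hlc) and Critic.forward takes (obs, action, hlc), where obs carries an 'encoding' array and hlc indexes __HLCNUMBER_TO_HLC__). The following is a minimal usage sketch consistent with those signatures, not the authors' test harness; it assumes a 15-feature encoding (implied by Critic's input_size = 15 + action_dim) and a CUDA device for the Critic, since Critic hard-codes self._device = 'cuda'.

# Minimal usage sketch for the branched Actor/Critic above (assumptions:
# 15-dim 'encoding' observations; hlc=0 selects the 'right' branch;
# CUDA must be available because Critic hard-codes 'cuda').
import numpy as np
import torch

device = 'cuda' if torch.cuda.is_available() else 'cpu'

batch_size = 8
obs = [{'encoding': np.random.random(15).astype(np.float32)}
       for _ in range(batch_size)]
actions = [torch.rand(3) for _ in range(batch_size)]  # stacked and moved to device inside Critic

# The modules build their input tensors on their own device, so their
# parameters have to be moved there as well.
actor = Actor(input_size=15, hidden_size=512, action_dim=3, output_factor=1).to(device)
critic = Critic(hidden_dim=512, action_dim=3).to('cuda')  # Critic's device is hard-coded

a = actor(obs, 0)            # shape (batch_size, action_dim * output_factor)
q = critic(obs, actions, 0)  # shape (batch_size, 1)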
agacera/udacity_mle_word2vec_recommender
[ "289c9a5754dd3833bfbd3424b888ae7e3b32db23" ]
[ "word2vec_recommender/explorer.py" ]
[ "#AUTOGENERATED! DO NOT EDIT! File to edit: dev/04_explorer.ipynb (unless otherwise specified).\n\n__all__ = ['create_dash_app']\n\n#Cell\nimport dash\nimport dash_core_components as dcc\nimport dash_html_components as html\nfrom dash.dependencies import Input, Output\n\nfrom functools import lru_cache\nfrom pathlib import Path\n\nimport pandas as pd\nimport numpy as np\nimport requests\n\nfrom .core import *\nfrom .recommender import KnnRecommender\nfrom .tmdb import load_api_key, TmdbApi, MovieData\n\n#Cell\ndef create_dash_app(dataset_path: Path, model_path: Path, dash_params: dict = None):\n    # load dataframes\n    movies_df = pd.read_csv(dataset_path / \"movies.csv\")\n    links_df = pd.read_csv(dataset_path / \"links.csv\")\n\n    # create TMDB API\n    movielens_to_tmdb_lookup = {\n        int(movie_id):int(tmdb_id)\n        for movie_id, tmdb_id in links_df.set_index(\"movieId\")[\"tmdbId\"].to_dict().items()\n        if movie_id > 0 and tmdb_id > 0\n    }\n    api = TmdbApi(api_key=load_api_key(), movielens_to_tmdb_lookup=movielens_to_tmdb_lookup)\n\n    # load model\n    with open(model_path / \"embeddings.pkl\", \"rb\") as f:\n        embeddings = np.load(f)\n    with open(model_path / \"words_index.pkl\", \"rb\") as f:\n        word_indexes = np.load(f)\n    movie_id_to_index_lookup = {int(movie_id):idx for idx, movie_id in enumerate(word_indexes)}\n    knn_recommender = KnnRecommender(\n        word_indexes=word_indexes,\n        embeddings=embeddings)\n\n    if not dash_params:\n        dash_params={}\n    app = dash.Dash(__name__, **dash_params)\n\n    # base layout\n    app.layout = html.Div(children=[\n        html.H1(children='MovieLens Recommender based on Word2Vec'),\n\n        html.Div(children='''\n            Dash: A web application framework for Python.\n        '''),\n        dcc.Dropdown(\n            id=\"movie-id\",\n            options=movies_df[[\"title\", \"movieId\"]].apply(lambda r: {\"value\": r[\"movieId\"], \"label\": r[\"title\"]} ,axis=1).tolist(),\n        ),\n        html.Div(id=\"selected-movie-id\", children=\"\")\n    ])\n\n    def movie_card(movie: MovieData, seed: bool = False) -> html.Div:\n        description = f\"{movie.title} (id={movie.movie_id}, tmdb_id={movie.tmdb_id})\"\n        return html.Div(children=[\n            html.H2(description) if seed else html.H3(description),\n            html.Img(src=movie.image_url)\n        ])\n\n    @app.callback(\n        Output(component_id='selected-movie-id', component_property='children'),\n        [Input(component_id='movie-id', component_property='value')]\n    )\n    def update_output_div(movie_id) -> html.Div:\n        if not movie_id:\n            return html.Div(children=\"No movie selected\")\n        print(movie_id)\n        movie = api.fetch_movie_data_by_movielens_id(int(movie_id))\n        if not movie:\n            return html.Div(children=\"Movie not found on TMDB\")\n        movie_index = movie_id_to_index_lookup.get(int(movie_id))\n        if movie_index is None:\n            return html.Div(children=\"No embeddings for movie\")\n        movie_recs = knn_recommender.recommend_by_index(movie_index) or []\n        movies = [ api.fetch_movie_data_by_movielens_id(rec.movie_id) for rec in movie_recs ]\n        return html.Div(children=[\n            movie_card(movie, seed=True),\n            html.Div(children=[\n                html.H2(\"Recommendations\"),\n                *[movie_card(m) for m in movies]\n            ])\n        ])\n\n    return app" ]
[ [ "pandas.read_csv", "numpy.load" ] ]
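The KnnRecommender used in the record above is imported from .recommender and its implementation is not shown here. As a rough sketch only (an assumed cosine-similarity lookup, not the project's actual code, and returning plain (movie_id, similarity) pairs where the real class returns richer record objects), a nearest-neighbour query over the word2vec embedding matrix could look like:

# Assumed sketch of a nearest-neighbour lookup over a word2vec embedding
# matrix, in the spirit of the KnnRecommender used above.
import numpy as np

def recommend_by_index(embeddings, word_indexes, movie_index, k=10):
    # L2-normalise rows so a dot product equals cosine similarity.
    norms = np.linalg.norm(embeddings, axis=1, keepdims=True)
    unit = embeddings / np.clip(norms, 1e-12, None)
    sims = unit @ unit[movie_index]   # cosine similarity to the seed row
    sims[movie_index] = -np.inf       # exclude the seed movie itself
    top = np.argsort(-sims)[:k]
    return [(int(word_indexes[i]), float(sims[i])) for i in top]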
eulloa10/work-automation-projects
[ "6a25b4507317e7072c23e2fb8ae6b632174140e9" ]
[ "excel_workbook_automation/revenue/revenue_data.py" ]
[ "import pandas as pd \nimport datetime\n\nbegin_time = datetime.datetime.now()\n\nreport_dl_date = '010621'\nhyphen_mm_yy = '05-21'\nlast_hyphen_mm_yy = '04-21' \nyyyy_mm = '202105'\ncurr_mth_name = 'May'\nmm_dd = '0531'\ncurr_yyyy = '2021'\nhyphen_yyyy_mm = '2021-05'\n\n#US Oracle Reports\ndefrev_us = f'deferred_revenue_us_{report_dl_date}.xlsx'\nrefcur_us = f'current_period_refunds_us_{report_dl_date}.xlsx'\nrefpr_us = f'prior_period_refunds_us_{report_dl_date}.xlsx'\nrev_us = f'revenue_us_{report_dl_date}.xlsx'\ntax_us = f'tax_us_{report_dl_date}.xlsx'\n\n#DE Oracle Reports\ndefrev_de = f'deferred_revenue_de_{report_dl_date}.xlsx'\nrefcur_de = f'current_period_refund_de_{report_dl_date}.xlsx'\nrefpr_de = f'prior_period_refund_de_{report_dl_date}.xlsx'\nrev_de = f'revenue_de_{report_dl_date}.xlsx'\ntax_de = f'tax_de_{report_dl_date}.xlsx'\n\n#US Oracle Report DataFrames\ndf1 = pd.read_excel(defrev_us, header=None)\ndf2 = pd.read_excel(refcur_us, header=None)\ndf3 = pd.read_excel(refpr_us, header=None)\ndf4 = pd.read_excel(rev_us, header=None)\ndf5 = pd.read_excel(tax_us, header=None)\n\n#DE Oracle Report DataFrames\ndf6 = pd.read_excel(defrev_de, header=None)\ndf7 = pd.read_excel(refcur_de, header=None)\ndf8 = pd.read_excel(refpr_de, header=None)\ndf9 = pd.read_excel(rev_de, header=None)\ndf10 = pd.read_excel(tax_de, header=None)\n\n#Splash deferred revenue activity report\nsplash = pd.read_excel('splash_report.xlsx', sheet_name='Data', header=None)\n\n#Wholesale report (first row is the header; 'headers' is not a pandas keyword)\nwholesale = pd.read_excel('wholesale.xlsx', header=0)\n\nwholesale_df_max_row = wholesale.shape[0]\ntransaction_month_formulas = []\norder_month_formulas = []\nshipping_month_formulas = []\namounts_in_gl_formulas = []\nvariance_formulas = []\n\ni = 2\nwhile i <= wholesale_df_max_row + 1:\n    transaction_month_formulas.append(f'=MONTH(A{i})')\n    order_month_formulas.append(f'=MONTH(D{i})')\n    shipping_month_formulas.append(f'=IF(L{i} = \"\", \"\",MONTH(L{i}))')\n    amounts_in_gl_formulas.append(f\"=VLOOKUP(Q{i},'Cash and AR Report'!F:R,13,0)\")\n    variance_formulas.append(f'=F{i}-R{i}')\n    i += 1\n\n\nwholesale['Transaction Month (formula)'] = transaction_month_formulas\nwholesale['Order Month (formula)'] = order_month_formulas\nwholesale['Shipping Month (formula)'] = shipping_month_formulas\nwholesale[f'AR Data for {curr_mth_name} Orders'] = ''\nwholesale['Amounts in GL'] = amounts_in_gl_formulas\nwholesale['Variance between GL and S6 data'] = variance_formulas\n\n\n\n# Adjustment DataFrames\norders_cancelled_not_cancelled_sheets = ['Ref Adj for Cancelled Orders', 'Ref Adj for Orders Not Cancel ']\n\norder_cancelled_after_eom = pd.read_excel(fr'revenue_adjustments\\{hyphen_yyyy_mm} {curr_mth_name}_OrderCancelledAfter{mm_dd}.xlsx', sheet_name=f'{yyyy_mm}_OrderCancelledAfter_{curr_yyyy}', header=None)\n\norders_cancelled_not_cancelled = pd.read_excel(fr'revenue_adjustments\\{hyphen_yyyy_mm} Refunds Adj for Orders Cancelled and not Cancelled.xlsx', sheet_name=['Ref Adj for Cancelled Orders', 'Ref Adj for Orders Not Cancel '], header=None)\n\nduplicate_refunds = pd.read_excel(fr'revenue_adjustments\\{hyphen_yyyy_mm} s6_refund_report_ - Duplicate refund amounts.xlsx', sheet_name='Summary', header=None)\n\nshipping_adjustment = pd.read_excel(fr'revenue_adjustments\\{hyphen_yyyy_mm} Shipping Adjustment.xlsx', sheet_name='Summary', header=None)\n\nstripe_chargebacks = pd.read_excel(fr'revenue_adjustments\\S6 Stripe Chargebacks {hyphen_mm_yy}.xlsx', sheet_name='stripe chargebacks only', header=None)\n\nstripe_unmatched_refunds = 
pd.read_excel(fr'revenue_adjustments\\Tableau Stripe Unmatched Refunds {hyphen_mm_yy}.xlsx', sheet_name='Summary', header=None)\n\n# Journal Entry DataFrames\ns6_wb_sheet_names = ['JE', 'Cash','Prior Period Ref Calc US', 'Prior Period Ref Calc DE',\n 'Curr Period Ref Calc US', 'Curr Period Ref Calc DE', 'Wholesale Order Tax',\n f'{last_hyphen_mm_yy} GiftCard', 'Wholesale', 'Screenshot support', 'SOX Control Notes']\n\ns6_workbook = pd.read_excel(fr'\\revenue_workbook\\110 - 223180 S6 Deferred Rev {hyphen_mm_yy}.xlsx', sheet_name=None, header=None)\n\n\n# Write the file to the directory with the new report generated \nwith pd.ExcelWriter('s6_revenue_template_v1.xlsx', \n date_format='MM/DD/YYYY',\n datetime_format='MM/DD/YYYY') as writer: \n for sheet in s6_wb_sheet_names[:2]:\n s6_workbook[f'{sheet}'].to_excel(writer, sheet_name=f'{sheet}', index=False, header=False)\n \n splash.to_excel(writer, sheet_name = 'Cash and AR Report', index=False, header=False) \n df1.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Def Revenue US', index= False, header=False)\n df6.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Def Revenue DE', index= False, header=False)\n df4.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Revenue US', index= False, header=False)\n df9.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Revenue DE', index= False, header=False)\n df3.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Prior Period Refund US', index= False, header=False)\n df8.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Prior Period Refund DE', index= False, header=False)\n df2.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Curr Per Partial Ref US', index= False, header=False)\n df7.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Curr Per Partial Ref DE', index= False, header=False)\n\n for sheet in s6_wb_sheet_names[2:6]:\n s6_workbook[f'{sheet}'].to_excel(writer, sheet_name=f'{sheet}', index=False, header=False)\n\n df5.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Tax US', index= False, header=False)\n df10.to_excel(writer, sheet_name=f'{hyphen_mm_yy} Tax DE', index= False, header=False)\n\n for sheet in s6_wb_sheet_names[6:]: \n if sheet == f'{last_hyphen_mm_yy} GiftCard':\n s6_workbook[f'{sheet}'].to_excel(writer, sheet_name=f'{hyphen_mm_yy} GiftCard', index=False, header=False)\n elif sheet == 'Wholesale':\n wholesale.to_excel(writer, sheet_name='Wholesale', index=False, header=True)\n else: \n s6_workbook[f'{sheet}'].to_excel(writer, sheet_name=f'{sheet}', index=False, header=False)\n\n for sheet in orders_cancelled_not_cancelled_sheets:\n orders_cancelled_not_cancelled[f'{sheet}'].to_excel(writer, sheet_name=f'{sheet}', index= False, header=False)\n \n duplicate_refunds.to_excel(writer, sheet_name='Dup Refunds', index= False, header=False)\n shipping_adjustment.to_excel(writer, sheet_name='Shipping Adj', index= False, header=False)\n stripe_chargebacks.to_excel(writer, sheet_name='Stripe Chargebacks', index= False, header=False)\n stripe_unmatched_refunds.to_excel(writer, sheet_name='Stripe Unmatched Ref', index= False, header=False)\n order_cancelled_after_eom.to_excel(writer, sheet_name='Order Canc EOM', index= False, header=False)\n\nprint(\"Revenue Workbook Step 1 Completed: \", datetime.datetime.now() - begin_time)\n" ]
[ [ "pandas.read_excel", "pandas.ExcelWriter" ] ]
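The wholesale columns in the script above are populated with literal Excel formula strings (e.g. =MONTH(A2)) rather than computed values: pandas writes the strings out verbatim and Excel evaluates them when the workbook is opened, with the row offset of 2 accounting for the header row and Excel's 1-based numbering. A stripped-down sketch of the same pattern, using hypothetical file and column names:

# Stripped-down sketch of the formula-string pattern used above
# (hypothetical file/column names; Excel evaluates the formulas on open).
import pandas as pd

df = pd.DataFrame({'order_date': pd.to_datetime(['2021-05-03', '2021-05-17'])})
# Data starts on spreadsheet row 2 because row 1 holds the header.
df['Order Month (formula)'] = [f'=MONTH(A{i})' for i in range(2, len(df) + 2)]

with pd.ExcelWriter('formula_demo.xlsx',
                    date_format='MM/DD/YYYY',
                    datetime_format='MM/DD/YYYY') as writer:
    df.to_excel(writer, sheet_name='Demo', index=False, header=True)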
HEPcodes/MG5_aMC
[ "aeb96510d1cf204bfd4e753569a8cc7becb62275" ]
[ "madgraph/various/process_checks.py" ]
[ "################################################################################\n#\n# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors\n#\n# This file is a part of the MadGraph5_aMC@NLO project, an application which \n# automatically generates Feynman diagrams and matrix elements for arbitrary\n# high-energy processes in the Standard Model and beyond.\n#\n# It is subject to the MadGraph5_aMC@NLO license which should accompany this \n# distribution.\n#\n# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch\n#\n################################################################################\n\"\"\"Several different checks for processes (and hence models):\npermutation tests, gauge invariance tests, lorentz invariance\ntests. Also class for evaluation of Python matrix elements,\nMatrixElementEvaluator.\"\"\"\n\nfrom __future__ import division\n\nfrom __future__ import absolute_import\nimport array\nimport copy\nimport fractions\nimport itertools\nimport logging\nimport math\nimport os\nimport sys\nimport re\nimport shutil\nimport random\nimport glob\nimport re\nimport subprocess\nimport time\nimport datetime\nimport errno\nimport pickle\n# If psutil becomes standard, the RAM check can be performed with it instead\n#import psutil\n\nimport aloha\nimport aloha.aloha_writers as aloha_writers\nimport aloha.create_aloha as create_aloha\n\nimport madgraph.iolibs.export_python as export_python\nimport madgraph.iolibs.helas_call_writers as helas_call_writers\nimport models.import_ufo as import_ufo\nimport madgraph.iolibs.save_load_object as save_load_object\nimport madgraph.iolibs.file_writers as writers\n\nimport madgraph.core.base_objects as base_objects\nimport madgraph.core.color_algebra as color\nimport madgraph.core.color_amp as color_amp\nimport madgraph.core.helas_objects as helas_objects\nimport madgraph.core.diagram_generation as diagram_generation\n\nimport madgraph.various.rambo as rambo\nimport madgraph.various.misc as misc\nimport madgraph.various.progressbar as pbar\nimport madgraph.various.banner as bannermod\nimport madgraph.various.progressbar as pbar\n\nimport madgraph.loop.loop_diagram_generation as loop_diagram_generation\nimport madgraph.loop.loop_helas_objects as loop_helas_objects\nimport madgraph.loop.loop_base_objects as loop_base_objects\nimport models.check_param_card as check_param_card\n\nfrom madgraph.interface.madevent_interface import MadLoopInitializer\nfrom madgraph.interface.common_run_interface import AskforEditCard\nfrom madgraph import MG5DIR, InvalidCmd, MadGraph5Error\n\nfrom madgraph.iolibs.files import cp\n\nimport models.model_reader as model_reader\nimport aloha.template_files.wavefunctions as wavefunctions\nfrom aloha.template_files.wavefunctions import \\\n ixxxxx, oxxxxx, vxxxxx, sxxxxx, txxxxx, irxxxx, orxxxx\nimport six\nStringIO = six\nfrom six.moves import range\nfrom six.moves import zip\nimport io\nif six.PY3:\n file = io.FileIO\n\n\nADDED_GLOBAL = []\n\ntemp_dir_prefix = \"TMP_CHECK\"\n\npjoin = os.path.join\n\ndef clean_added_globals(to_clean):\n for value in list(to_clean):\n del globals()[value]\n to_clean.remove(value)\n\n#===============================================================================\n# Fake interface to be instancied when using process_checks from tests instead.\n#===============================================================================\nclass FakeInterface(object):\n \"\"\" Just an 'option container' to mimick the interface which is passed to the\n tests. 
We put in only what is now used from interface by the test:\n cmd.options['fortran_compiler']\n cmd.options['complex_mass_scheme']\n cmd._mgme_dir\"\"\"\n def __init__(self, mgme_dir = \"\", complex_mass_scheme = False,\n fortran_compiler = 'gfortran' ):\n self._mgme_dir = mgme_dir\n self.options = {}\n self.options['complex_mass_scheme']=complex_mass_scheme\n self.options['fortran_compiler']=fortran_compiler\n\n#===============================================================================\n# Logger for process_checks\n#===============================================================================\n\nlogger = logging.getLogger('madgraph.various.process_checks')\n\n\n# Helper function to boost momentum\ndef boost_momenta(p, boost_direction=1, beta=0.5):\n \"\"\"boost the set momenta in the 'boost direction' by the 'beta' \n factor\"\"\"\n \n boost_p = [] \n gamma = 1/ math.sqrt(1 - beta**2)\n for imp in p:\n bosst_p = imp[boost_direction]\n E, px, py, pz = imp\n boost_imp = []\n # Energy:\n boost_imp.append(gamma * E - gamma * beta * bosst_p)\n # PX\n if boost_direction == 1:\n boost_imp.append(-gamma * beta * E + gamma * px)\n else: \n boost_imp.append(px)\n # PY\n if boost_direction == 2:\n boost_imp.append(-gamma * beta * E + gamma * py)\n else: \n boost_imp.append(py) \n # PZ\n if boost_direction == 3:\n boost_imp.append(-gamma * beta * E + gamma * pz)\n else: \n boost_imp.append(pz) \n #Add the momenta to the list\n boost_p.append(boost_imp) \n \n return boost_p\n\n#===============================================================================\n# Helper class MatrixElementEvaluator\n#===============================================================================\nclass MatrixElementEvaluator(object):\n \"\"\"Class taking care of matrix element evaluation, storing\n relevant quantities for speedup.\"\"\"\n\n def __init__(self, model , param_card = None,\n auth_skipping = False, reuse = True, cmd = FakeInterface()):\n \"\"\"Initialize object with stored_quantities, helas_writer,\n model, etc.\n auth_skipping = True means that any identical matrix element will be\n evaluated only once\n reuse = True means that the matrix element corresponding to a\n given process can be reused (turn off if you are using\n different models for the same process)\"\"\"\n \n self.cmd = cmd\n \n # Writer for the Python matrix elements\n self.helas_writer = helas_call_writers.PythonUFOHelasCallWriter(model)\n \n # Read a param_card and calculate couplings\n self.full_model = model_reader.ModelReader(model)\n try:\n self.full_model.set_parameters_and_couplings(param_card)\n except MadGraph5Error:\n if isinstance(param_card, (str,file)):\n raise\n logger.warning('param_card present in the event file not compatible.'+\n ' We will use the default one.')\n self.full_model.set_parameters_and_couplings()\n \n self.auth_skipping = auth_skipping\n self.reuse = reuse\n self.cmass_scheme = cmd.options['complex_mass_scheme']\n self.store_aloha = []\n self.stored_quantities = {}\n \n #===============================================================================\n # Helper function evaluate_matrix_element\n #===============================================================================\n def evaluate_matrix_element(self, matrix_element, p=None, full_model=None, \n gauge_check=False, auth_skipping=None, output='m2',\n options=None):\n \"\"\"Calculate the matrix element and evaluate it for a phase space point\n output is either m2, amp, jamp\n \"\"\"\n\n if full_model:\n self.full_model = full_model\n process = 
matrix_element.get('processes')[0]\n model = process.get('model')\n\n\n if \"matrix_elements\" not in self.stored_quantities:\n self.stored_quantities['matrix_elements'] = []\n matrix_methods = {}\n\n if self.reuse and \"Matrix_%s\" % process.shell_string() in globals() and p:\n if matrix_element not in self.stored_quantities['matrix_elements']:\n self.stored_quantities['matrix_elements'].append(matrix_element)\n # Evaluate the matrix element for the momenta p\n matrix = eval(\"Matrix_%s()\" % process.shell_string(), globals())\n me_value = matrix.smatrix(p, self.full_model)\n if output == \"m2\":\n return matrix.smatrix(p, self.full_model), matrix.amp2\n else:\n m2 = matrix.smatrix(p, self.full_model)\n return {'m2': m2, output:getattr(matrix, output)}\n\n if (auth_skipping or self.auth_skipping) and matrix_element in \\\n self.stored_quantities['matrix_elements']:\n # Exactly the same matrix element has been tested\n logger.info(\"Skipping %s, \" % process.nice_string() + \\\n \"identical matrix element already tested\" \\\n )\n return None\n\n\n self.stored_quantities['matrix_elements'].append(matrix_element)\n # Create an empty color basis, and the list of raw\n # colorize objects (before simplification) associated\n # with amplitude\n if \"list_colorize\" not in self.stored_quantities:\n self.stored_quantities[\"list_colorize\"] = []\n if \"list_color_basis\" not in self.stored_quantities:\n self.stored_quantities[\"list_color_basis\"] = []\n if \"list_color_matrices\" not in self.stored_quantities:\n self.stored_quantities[\"list_color_matrices\"] = [] \n\n col_basis = color_amp.ColorBasis()\n new_amp = matrix_element.get_base_amplitude()\n matrix_element.set('base_amplitude', new_amp)\n colorize_obj = col_basis.create_color_dict_list(new_amp)\n\n try:\n # If the color configuration of the ME has\n # already been considered before, recycle\n # the information\n col_index = self.stored_quantities[\"list_colorize\"].index(colorize_obj)\n except ValueError:\n # If not, create color basis and color\n # matrix accordingly\n self.stored_quantities['list_colorize'].append(colorize_obj)\n col_basis.build()\n self.stored_quantities['list_color_basis'].append(col_basis)\n col_matrix = color_amp.ColorMatrix(col_basis)\n self.stored_quantities['list_color_matrices'].append(col_matrix)\n col_index = -1\n\n # Set the color for the matrix element\n matrix_element.set('color_basis',\n self.stored_quantities['list_color_basis'][col_index])\n matrix_element.set('color_matrix',\n self.stored_quantities['list_color_matrices'][col_index])\n\n # Create the needed aloha routines\n if \"used_lorentz\" not in self.stored_quantities:\n self.stored_quantities[\"used_lorentz\"] = []\n\n me_used_lorentz = set(matrix_element.get_used_lorentz())\n me_used_lorentz = [lorentz for lorentz in me_used_lorentz \\\n if lorentz not in self.store_aloha]\n\n aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))\n aloha_model.add_Lorentz_object(model.get('lorentz'))\n aloha_model.compute_subset(me_used_lorentz)\n\n # Write out the routines in Python\n aloha_routines = []\n for routine in aloha_model.values():\n aloha_routines.append(routine.write(output_dir = None, \n mode='mg5',\n language = 'Python'))\n for routine in aloha_model.external_routines:\n for path in aloha_model.locate_external(routine, 'Python'):\n aloha_routines.append(open(path).read())\n\n # Define the routines to be available globally\n previous_globals = list(globals().keys())\n for routine in aloha_routines:\n exec(routine, globals())\n 
for key in globals().keys():\n if key not in previous_globals:\n ADDED_GLOBAL.append(key)\n\n # Add the defined Aloha routines to used_lorentz\n self.store_aloha.extend(me_used_lorentz)\n # Export the matrix element to Python calls\n exporter = export_python.ProcessExporterPython(matrix_element,\n self.helas_writer)\n try:\n matrix_methods = exporter.get_python_matrix_methods(\\\n gauge_check=gauge_check)\n# print \"I got matrix_methods=\",str(matrix_methods.items()[0][1])\n except helas_call_writers.HelasWriterError as error:\n logger.info(error)\n return None\n # If one wants to output the python code generated for the computation\n # of these matrix elements, it is possible to run the following cmd\n# open('output_path','w').write(matrix_methods[process.shell_string()])\n if self.reuse:\n # Define the routines (globally)\n exec(matrix_methods[process.shell_string()], globals())\t \n ADDED_GLOBAL.append('Matrix_%s' % process.shell_string())\n else:\n # Define the routines (locally is enough)\n exec(matrix_methods[process.shell_string()], globals())\n # Generate phase space point to use\n if not p:\n p, w_rambo = self.get_momenta(process, options)\n # Evaluate the matrix element for the momenta p\n exec(\"data = Matrix_%s()\" % process.shell_string(), globals())\n if output == \"m2\":\n return data.smatrix(p, self.full_model), data.amp2\n else:\n m2 = data.smatrix(p,self.full_model)\n return {'m2': m2, output:getattr(data, output)}\n \n @staticmethod\n def pass_isolation_cuts(pmoms, ptcut=50.0, drcut=0.5):\n \"\"\" Check whether the specified kinematic point passes isolation cuts\n \"\"\"\n\n def Pt(pmom):\n \"\"\" Computes the pt of a 4-momentum\"\"\"\n return math.sqrt(pmom[1]**2+pmom[2]**2)\n\n def DeltaR(p1,p2):\n \"\"\" Computes the DeltaR between two 4-momenta\"\"\"\n # First compute pseudo-rapidities\n p1_vec=math.sqrt(p1[1]**2+p1[2]**2+p1[3]**2)\n p2_vec=math.sqrt(p2[1]**2+p2[2]**2+p2[3]**2) \n eta1=0.5*math.log((p1_vec+p1[3])/(p1_vec-p1[3]))\n eta2=0.5*math.log((p2_vec+p2[3])/(p2_vec-p2[3]))\n # Then azimutal angle phi\n phi1=math.atan2(p1[2],p1[1])\n phi2=math.atan2(p2[2],p2[1])\n dphi=abs(phi2-phi1)\n # Take the wraparound factor into account\n dphi=abs(abs(dphi-math.pi)-math.pi)\n # Now return deltaR\n return math.sqrt(dphi**2+(eta2-eta1)**2)\n\n for i, pmom in enumerate(pmoms[2:]):\n # Pt > 50 GeV\n if Pt(pmom)<ptcut:\n return False\n # Delta_R ij > 0.5\n for pmom2 in pmoms[3+i:]:\n if DeltaR(pmom,pmom2)<drcut:\n return False\n return True\n \n #===============================================================================\n # Helper function get_momenta\n #===============================================================================\n def get_momenta(self, process, options=None, special_mass=None):\n \"\"\"Get a point in phase space for the external states in the given\n process, with the CM energy given. 
The incoming particles are\n assumed to be oriented along the z axis, with particle 1 along the\n positive z axis.\n For the CMS check, one must be able to chose the mass of the special\n resonance particle with id = -1, and the special_mass option allows\n to specify it.\"\"\"\n\n if not options:\n energy=1000\n events=None\n else:\n energy = options['energy']\n events = options['events']\n to_skip = options['skip_evt']\n \n if not (isinstance(process, base_objects.Process) and \\\n isinstance(energy, (float,int))):\n raise rambo.RAMBOError(\"Not correct type for arguments to get_momenta\")\n\n\n sorted_legs = sorted(process.get('legs'), key=lambda l: l.get('number')) \n\n # If an events file is given use it for getting the momentum\n if events:\n ids = [l.get('id') for l in sorted_legs]\n import MadSpin.decay as madspin\n if not hasattr(self, 'event_file'):\n fsock = open(events)\n self.event_file = madspin.Event(fsock)\n\n skip = 0\n while self.event_file.get_next_event() != 'no_event':\n event = self.event_file.particle\n #check if the event is compatible\n event_ids = [p['pid'] for p in event.values()]\n if event_ids == ids:\n skip += 1\n if skip > to_skip:\n break\n else:\n raise MadGraph5Error('No compatible events for %s' % ids)\n p = []\n for part in event.values():\n m = part['momentum']\n p.append([m.E, m.px, m.py, m.pz])\n return p, 1\n\n nincoming = len([leg for leg in sorted_legs if leg.get('state') == False])\n nfinal = len(sorted_legs) - nincoming\n\n # Find masses of particles\n mass = []\n for l in sorted_legs:\n if l.get('id') != 0:\n mass_string = self.full_model.get_particle(l.get('id')).get('mass') \n mass.append(self.full_model.get('parameter_dict')[mass_string].real)\n else:\n if isinstance(special_mass, float):\n mass.append(special_mass)\n else:\n raise Exception(\"A 'special_mass' option must be specified\"+\\\n \" in get_momenta when a leg with id=-10 is present (for CMS check)\")\n #mass = [math.sqrt(m.real) for m in mass]\n\n\n\n # Make sure energy is large enough for incoming and outgoing particles,\n# # Keep the special_mass case separate to be sure that nothing interferes\n# # with the regular usage of get_momenta.\n# if not (any(l.get('id')==0 for l in sorted_legs) and \\\n# isinstance(special_mass, float)):\n energy = max(energy, sum(mass[:nincoming])*1.2,sum(mass[nincoming:])*1.2)\n# else:\n# incoming_mass = sum([mass[i] for i, leg in enumerate(sorted_legs) \\\n# if leg.get('state') == False and leg.get('id')!=0])\n# outcoming_mass = sum([mass[i] for i, leg in enumerate(sorted_legs) \\\n# if leg.get('state') == True and leg.get('id')!=0])\n# energy = max(energy, incoming_mass*1.2, outcoming_mass*1.2)\n\n if nfinal == 1:\n p = []\n energy = mass[-1]\n p.append([energy/2,0,0,energy/2])\n p.append([energy/2,0,0,-energy/2])\n p.append([mass[-1],0,0,0])\n return p, 1.0\n\n e2 = energy**2\n m1 = mass[0]\n p = []\n\n masses = rambo.FortranList(nfinal)\n for i in range(nfinal):\n masses[i+1] = mass[nincoming + i]\n\n if nincoming == 1:\n # Momenta for the incoming particle\n p.append([abs(m1), 0., 0., 0.])\n p_rambo, w_rambo = rambo.RAMBO(nfinal, abs(m1), masses)\n # Reorder momenta from px,py,pz,E to E,px,py,pz scheme\n for i in range(1, nfinal+1):\n momi = [p_rambo[(4,i)], p_rambo[(1,i)],\n p_rambo[(2,i)], p_rambo[(3,i)]]\n p.append(momi)\n\n return p, w_rambo\n\n if nincoming != 2:\n raise rambo.RAMBOError('Need 1 or 2 incoming particles')\n\n if nfinal == 1:\n energy = masses[1]\n if masses[1] == 0.0:\n raise rambo.RAMBOError('The kinematic 2 > 1 with the 
final'+\\\n ' state particle massless is invalid')\n\n e2 = energy**2\n m2 = mass[1]\n\n mom = math.sqrt((e2**2 - 2*e2*m1**2 + m1**4 - 2*e2*m2**2 - \\\n 2*m1**2*m2**2 + m2**4) / (4*e2))\n e1 = math.sqrt(mom**2+m1**2)\n e2 = math.sqrt(mom**2+m2**2)\n # Set momenta for incoming particles\n p.append([e1, 0., 0., mom])\n p.append([e2, 0., 0., -mom])\n\n if nfinal == 1:\n p.append([energy, 0., 0., 0.])\n return p, 1.\n\n p_rambo, w_rambo = rambo.RAMBO(nfinal, energy, masses)\n\n # Reorder momenta from px,py,pz,E to E,px,py,pz scheme\n for i in range(1, nfinal+1):\n momi = [p_rambo[(4,i)], p_rambo[(1,i)],\n p_rambo[(2,i)], p_rambo[(3,i)]]\n p.append(momi)\n\n return p, w_rambo\n\n#===============================================================================\n# Helper class LoopMatrixElementEvaluator\n#===============================================================================\n\nclass LoopMatrixElementEvaluator(MatrixElementEvaluator):\n \"\"\"Class taking care of matrix element evaluation for loop processes.\"\"\"\n\n def __init__(self,cuttools_dir=None, output_path=None, tir_dir={}, \n cmd=FakeInterface(),*args,**kwargs):\n \"\"\"Allow for initializing the MG5 root where the temporary fortran\n output for checks is placed.\"\"\"\n \n super(LoopMatrixElementEvaluator,self).__init__(*args,cmd=cmd,**kwargs)\n\n self.mg_root=self.cmd._mgme_dir\n # If no specific output path is specified, then write in MG5 root directory\n if output_path is None:\n self.output_path = self.cmd._mgme_dir\n else:\n self.output_path = output_path\n \n self.cuttools_dir=cuttools_dir\n self.tir_dir=tir_dir\n self.loop_optimized_output = cmd.options['loop_optimized_output']\n # Set proliferate to true if you want to keep the produced directories\n # and eventually reuse them if possible\n self.proliferate=True\n \n #===============================================================================\n # Helper function evaluate_matrix_element for loops\n #===============================================================================\n def evaluate_matrix_element(self, matrix_element, p=None, options=None,\n gauge_check=False, auth_skipping=None, output='m2', \n PS_name = None, MLOptions={}):\n \"\"\"Calculate the matrix element and evaluate it for a phase space point\n Output can only be 'm2. 
The 'jamp' and 'amp' returned values are just\n empty lists at this point.\n If PS_name is not none the written out PS.input will be saved in \n the file PS.input_<PS_name> as well.\"\"\"\n\n process = matrix_element.get('processes')[0]\n model = process.get('model')\n \n if options and 'split_orders' in list(options.keys()):\n split_orders = options['split_orders']\n else:\n split_orders = -1\n \n if \"loop_matrix_elements\" not in self.stored_quantities:\n self.stored_quantities['loop_matrix_elements'] = []\n\n if (auth_skipping or self.auth_skipping) and matrix_element in \\\n [el[0] for el in self.stored_quantities['loop_matrix_elements']]:\n # Exactly the same matrix element has been tested\n logger.info(\"Skipping %s, \" % process.nice_string() + \\\n \"identical matrix element already tested\" )\n return None\n\n # Generate phase space point to use\n if not p:\n p, w_rambo = self.get_momenta(process, options=options)\n \n if matrix_element in [el[0] for el in \\\n self.stored_quantities['loop_matrix_elements']]: \n export_dir=self.stored_quantities['loop_matrix_elements'][\\\n [el[0] for el in self.stored_quantities['loop_matrix_elements']\\\n ].index(matrix_element)][1]\n logger.debug(\"Reusing generated output %s\"%str(export_dir))\n else: \n export_dir=pjoin(self.output_path,temp_dir_prefix)\n if os.path.isdir(export_dir):\n if not self.proliferate:\n raise InvalidCmd(\"The directory %s already exist. Please remove it.\"%str(export_dir))\n else:\n id=1\n while os.path.isdir(pjoin(self.output_path,\\\n '%s_%i'%(temp_dir_prefix,id))):\n id+=1\n export_dir=pjoin(self.output_path,'%s_%i'%(temp_dir_prefix,id))\n \n if self.proliferate:\n self.stored_quantities['loop_matrix_elements'].append(\\\n (matrix_element,export_dir))\n\n # I do the import here because there is some cyclic import of export_v4\n # otherwise\n import madgraph.loop.loop_exporters as loop_exporters\n if self.loop_optimized_output:\n exporter_class=loop_exporters.LoopProcessOptimizedExporterFortranSA\n else:\n exporter_class=loop_exporters.LoopProcessExporterFortranSA\n \n MLoptions = {'clean': True, \n 'complex_mass': self.cmass_scheme,\n 'export_format':'madloop', \n 'mp':True,\n 'SubProc_prefix':'P',\n 'compute_color_flows': not process.get('has_born'),\n 'loop_dir': pjoin(self.mg_root,'Template','loop_material'),\n 'cuttools_dir': self.cuttools_dir,\n 'fortran_compiler': self.cmd.options['fortran_compiler'],\n 'output_dependencies': self.cmd.options['output_dependencies']}\n\n MLoptions.update(self.tir_dir)\n \n FortranExporter = exporter_class(export_dir, MLoptions)\n FortranModel = helas_call_writers.FortranUFOHelasCallWriter(model)\n FortranExporter.copy_template(model)\n FortranExporter.generate_subprocess_directory(matrix_element, FortranModel)\n wanted_lorentz = list(set(matrix_element.get_used_lorentz()))\n wanted_couplings = list(set([c for l in matrix_element.get_used_couplings() \\\n for c in l]))\n FortranExporter.convert_model(model,wanted_lorentz,wanted_couplings)\n FortranExporter.finalize(matrix_element,\"\",self.cmd.options, ['nojpeg'])\n\n MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),\n split_orders=split_orders)\n\n self.fix_MadLoopParamCard(pjoin(export_dir,'Cards'),\n mp = gauge_check and self.loop_optimized_output, MLOptions=MLOptions)\n \n if gauge_check:\n file_path, orig_file_content, new_file_content = \\\n self.setup_ward_check(pjoin(export_dir,'SubProcesses'), \n ['helas_calls_ampb_1.f','loop_matrix.f'])\n file = open(file_path,'w')\n 
file.write(new_file_content)\n file.close()\n if self.loop_optimized_output:\n mp_file_path, mp_orig_file_content, mp_new_file_content = \\\n self.setup_ward_check(pjoin(export_dir,'SubProcesses'), \n ['mp_helas_calls_ampb_1.f','mp_compute_loop_coefs.f'],mp=True)\n mp_file = open(mp_file_path,'w')\n mp_file.write(mp_new_file_content)\n mp_file.close()\n \n # Evaluate the matrix element for the momenta p\n finite_m2 = self.get_me_value(process.shell_string_v4(), 0,\\\n export_dir, p, PS_name = PS_name, verbose=False)[0][0]\n\n # Restore the original loop_matrix.f code so that it could be reused\n if gauge_check:\n file = open(file_path,'w')\n file.write(orig_file_content)\n file.close()\n if self.loop_optimized_output:\n mp_file = open(mp_file_path,'w')\n mp_file.write(mp_orig_file_content)\n mp_file.close()\n \n # Now erase the output directory\n if not self.proliferate:\n shutil.rmtree(export_dir)\n \n if output == \"m2\": \n # We do not provide details (i.e. amps and Jamps) of the computed \n # amplitudes, hence the []\n return finite_m2, []\n else:\n return {'m2': finite_m2, output:[]}\n\n def fix_MadLoopParamCard(self,dir_name, mp=False, loop_filter=False,\n DoubleCheckHelicityFilter=False, MLOptions={}):\n \"\"\" Set parameters in MadLoopParams.dat suited for these checks.MP\n stands for multiple precision and can either be a bool or an integer\n to specify the mode.\"\"\"\n\n # Instanciate a MadLoopParam card\n file = open(pjoin(dir_name,'MadLoopParams.dat'), 'r')\n MLCard = bannermod.MadLoopParam(file)\n\n if isinstance(mp,bool):\n mode = 4 if mp else 1\n else:\n mode = mp\n\n for key, value in MLOptions.items():\n if key == \"MLReductionLib\":\n if isinstance(value, int):\n ml_reds = str(value)\n if isinstance(value,list):\n if len(value)==0:\n ml_reds = '1'\n else:\n ml_reds=\"|\".join([str(vl) for vl in value])\n elif isinstance(value, str):\n ml_reds = value\n elif isinstance(value, int):\n ml_reds = str(value)\n else:\n raise MadGraph5Error('The argument %s '%str(value)+\\\n ' in fix_MadLoopParamCard must be a string, integer'+\\\n ' or a list.')\n MLCard.set(\"MLReductionLib\",ml_reds) \n elif key == 'ImprovePS':\n MLCard.set('ImprovePSPoint',2 if value else -1)\n elif key == 'ForceMP':\n mode = 4\n elif key in MLCard:\n MLCard.set(key,value)\n else:\n raise Exception('The MadLoop options %s specified in function'%key+\\\n ' fix_MadLoopParamCard does not correspond to an option defined'+\\\n ' MadLoop nor is it specially handled in this function.')\n if not mode is None:\n MLCard.set('CTModeRun',mode)\n MLCard.set('CTModeInit',mode)\n MLCard.set('UseLoopFilter',loop_filter)\n MLCard.set('DoubleCheckHelicityFilter',DoubleCheckHelicityFilter)\n \n MLCard.write(pjoin(dir_name,os.pardir,'SubProcesses','MadLoopParams.dat'))\n\n @classmethod\n def get_me_value(cls, proc, proc_id, working_dir, PSpoint=[], PS_name = None,\n verbose=True, format='tuple', skip_compilation=False):\n \"\"\"Compile and run ./check, then parse the output and return the result\n for process with id = proc_id and PSpoint if specified.\n If PS_name is not none the written out PS.input will be saved in \n the file PS.input_<PS_name> as well\"\"\" \n if verbose:\n sys.stdout.write('.')\n sys.stdout.flush()\n \n shell_name = None\n directories = misc.glob('P%i_*' % proc_id, pjoin(working_dir, 'SubProcesses'))\n if directories and os.path.isdir(directories[0]):\n shell_name = os.path.basename(directories[0])\n\n # If directory doesn't exist, skip and return 0\n if not shell_name:\n logging.info(\"Directory hasn't 
been created for process %s: %s\", proc, directories)\n return ((0.0, 0.0, 0.0, 0.0, 0), [])\n\n if verbose: logging.debug(\"Working on process %s in dir %s\" % (proc, shell_name))\n \n dir_name = pjoin(working_dir, 'SubProcesses', shell_name)\n if not skip_compilation:\n # Make sure to recreate the executable and modified sources\n if os.path.isfile(pjoin(dir_name,'check')):\n os.remove(pjoin(dir_name,'check'))\n try:\n os.remove(pjoin(dir_name,'check_sa.o'))\n os.remove(pjoin(dir_name,'loop_matrix.o'))\n except OSError:\n pass\n # Now run make\n devnull = open(os.devnull, 'w')\n retcode = subprocess.call(['make','check'],\n cwd=dir_name, stdout=devnull, stderr=devnull)\n devnull.close()\n \n if retcode != 0:\n logging.info(\"Error while executing make in %s\" % shell_name)\n return ((0.0, 0.0, 0.0, 0.0, 0), [])\n\n # If a PS point is specified, write out the corresponding PS.input\n if PSpoint:\n misc.write_PS_input(pjoin(dir_name, 'PS.input'),PSpoint)\n # Also save the PS point used in PS.input_<PS_name> if the user\n # wanted so. It is used for the lorentz check. \n if not PS_name is None:\n misc.write_PS_input(pjoin(dir_name, \\\n 'PS.input_%s'%PS_name),PSpoint) \n # Run ./check\n try:\n output = subprocess.Popen('./check',\n cwd=dir_name,\n stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout\n output.read()\n output.close()\n if os.path.exists(pjoin(dir_name,'result.dat')):\n return cls.parse_check_output(open(pjoin(dir_name,\\\n 'result.dat')),format=format) \n else:\n logging.warning(\"Error while looking for file %s\"%str(os.path\\\n .join(dir_name,'result.dat')))\n return ((0.0, 0.0, 0.0, 0.0, 0), [])\n except IOError:\n logging.warning(\"Error while executing ./check in %s\" % shell_name)\n return ((0.0, 0.0, 0.0, 0.0, 0), [])\n\n @classmethod\n def parse_check_output(cls,output,format='tuple'):\n \"\"\"Parse the output string and return a pair where first four values are \n the finite, born, single and double pole of the ME and the fourth is the\n GeV exponent and the second value is a list of 4 momenta for all particles \n involved. 
Return the answer in two possible formats, 'tuple' or 'dict'.\"\"\"\n\n res_dict = {'res_p':[],\n 'born':0.0,\n 'finite':0.0,\n '1eps':0.0,\n '2eps':0.0,\n 'gev_pow':0,\n 'export_format':'Default',\n 'accuracy':0.0,\n 'return_code':0,\n 'Split_Orders_Names':[],\n 'Loop_SO_Results':[],\n 'Born_SO_Results':[],\n 'Born_kept':[],\n 'Loop_kept':[]\n }\n res_p = []\n \n # output is supposed to be a file, if it is its content directly then\n # I change it to be the list of line.\n if isinstance(output,(file,io.TextIOWrapper)) or isinstance(output,list):\n text=output\n elif isinstance(output,(str)) or (six.PY2 and isinstance(output, six.text_type)):\n text=output.split('\\n')\n elif isinstance(output, bytes):\n text=output.decode().split('\\n')\n else:\n raise MadGraph5Error('Type for argument output not supported in'+\\\n ' parse_check_output: %s' % type(output))\n for line in text:\n splitline=line.split()\n if len(splitline)==0:\n continue\n elif splitline[0]=='PS':\n res_p.append([float(s) for s in splitline[1:]])\n elif splitline[0]=='ASO2PI':\n res_dict['alphaS_over_2pi']=float(splitline[1])\n elif splitline[0]=='BORN':\n res_dict['born']=float(splitline[1])\n elif splitline[0]=='FIN':\n res_dict['finite']=float(splitline[1])\n elif splitline[0]=='1EPS':\n res_dict['1eps']=float(splitline[1])\n elif splitline[0]=='2EPS':\n res_dict['2eps']=float(splitline[1])\n elif splitline[0]=='EXP':\n res_dict['gev_pow']=int(splitline[1])\n elif splitline[0]=='Export_Format':\n res_dict['export_format']=splitline[1]\n elif splitline[0]=='ACC':\n res_dict['accuracy']=float(splitline[1])\n elif splitline[0]=='RETCODE':\n res_dict['return_code']=int(splitline[1])\n elif splitline[0]=='Split_Orders_Names':\n res_dict['Split_Orders_Names']=splitline[1:]\n elif splitline[0] in ['Born_kept', 'Loop_kept']:\n res_dict[splitline[0]] = [kept=='T' for kept in splitline[1:]]\n elif splitline[0] in ['Loop_SO_Results', 'Born_SO_Results']:\n # The value for this key of this dictionary is a list of elements\n # with format ([],{}) where the first list specifies the split\n # orders to which the dictionary in the second position corresponds \n # to.\n res_dict[splitline[0]].append(\\\n ([int(el) for el in splitline[1:]],{}))\n elif splitline[0]=='SO_Loop':\n res_dict['Loop_SO_Results'][-1][1][splitline[1]]=\\\n float(splitline[2])\n elif splitline[0]=='SO_Born':\n res_dict['Born_SO_Results'][-1][1][splitline[1]]=\\\n float(splitline[2])\n \n res_dict['res_p'] = res_p\n\n if format=='tuple':\n return ((res_dict['finite'],res_dict['born'],res_dict['1eps'],\n res_dict['2eps'],res_dict['gev_pow']), res_dict['res_p'])\n else:\n return res_dict\n \n @staticmethod\n def apply_log_tweak(proc_path, mode):\n \"\"\" Changes the file model_functions.f in the SOURCE of the process output\n so as to change how logarithms are analytically continued and see how\n it impacts the CMS check.\"\"\"\n valid_modes = ['default','recompile']\n if not (mode in valid_modes or (isinstance(mode, list) and\n len(mode)==2 and all(m in ['logp','logm','log'] for m in mode))):\n raise MadGraph5Error(\"Mode '%s' not reckonized\"%mode+\n \" in function apply_log_tweak.\")\n \n model_path = pjoin(proc_path,'Source','MODEL')\n directories = misc.glob('P0_*', pjoin(proc_path,'SubProcesses'))\n if directories and os.path.isdir(directories[0]):\n exe_path = directories[0]\n else:\n raise MadGraph5Error('Could not find a process executable '+\\\n 'directory in %s'%proc_dir)\n bu_path = pjoin(model_path, 'model_functions.f__backUp__')\n \n if mode=='default':\n 
# Restore the default source file model_function.f\n if not os.path.isfile(bu_path):\n raise MadGraph5Error('Back up file %s could not be found.'%bu_path)\n shutil.move(bu_path, pjoin(model_path, 'model_functions.f'))\n return\n\n if mode=='recompile':\n try:\n os.remove(pjoin(model_path,'model_functions.o'))\n os.remove(pjoin(proc_path,'lib','libmodel.a'))\n except:\n pass \n misc.compile(cwd=model_path)\n # Remove the executable to insure proper recompilation\n try:\n os.remove(pjoin(exe_path,'check'))\n except:\n pass\n misc.compile(arg=['check'], cwd=exe_path)\n return\n \n if mode[0]==mode[1]:\n return\n \n # Now change the logs\n mp_prefix = 'MP_'\n target_line = 'FUNCTION %%sREG%s(ARG)'%mode[0].lower()\n\n # Make sure to create a backup\n if not os.path.isfile(bu_path):\n shutil.copy(pjoin(model_path, 'model_functions.f'), bu_path) \n model_functions = open(pjoin(model_path,'model_functions.f'),'r')\n \n new_model_functions = []\n has_replaced = False\n just_replaced = False\n find_one_replacement= False\n mp_mode = None\n suffix = {'log':'','logp':r'\\s*\\+\\s*TWOPII','logm':r'\\s*\\-\\s*TWOPII'}\n replace_regex=r'^\\s*%%sREG%s\\s*=\\s*LOG\\(ARG\\)%s'%(mode[0],suffix[mode[0]])\n for line in model_functions:\n # Make sure to skip split lines after the replacement\n if just_replaced:\n if not re.match(r'\\s{6}', line):\n continue\n else:\n just_replaced = False\n if mp_mode is None:\n # We are looking for the start of the function\n new_model_functions.append(line)\n if (target_line%mp_prefix).lower() in line.lower():\n mp_mode = mp_prefix\n elif (target_line%'').lower() in line.lower():\n mp_mode = ''\n else:\n # Now apply the substitution\n if not has_replaced and re.match(replace_regex%mp_mode,line,\n re.IGNORECASE):\n # Apply the replacement\n if mode[0]=='log':\n if mp_mode=='':\n new_line =\\\n\"\"\" if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then\n reg%s=log(arg) %s TWOPII\n else\n reg%s=log(arg)\n endif\\n\"\"\"%(mode[0],'+' if mode[1]=='logp' else '-',mode[0])\n else:\n new_line =\\\n\"\"\" if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then\n mp_reg%s=log(arg) %s TWOPII\n else\n mp_reg%s=log(arg)\n endif\\n\"\"\"%(mode[0],'+' if mode[1]=='logp' else '-',mode[0])\n else:\n new_line = ' '*6+\"%sreg%s=log(arg) %s\\n\"%(mp_mode,mode[0],\n ('' if mode[1]=='log' else ('+TWOPII' if mode[1]=='logp' else '-TWOPII')))\n new_model_functions.append(new_line)\n just_replaced = True\n has_replaced = True\n find_one_replacement = True\n else:\n new_model_functions.append(line)\n if re.match(r'^\\s*END\\s*$',line,re.IGNORECASE):\n mp_mode = None\n has_replaced = False\n \n if not find_one_replacement:\n logger.warning('No replacement was found/performed for token '+\n \"'%s->%s'.\"%(mode[0],mode[1]))\n else:\n open(pjoin(model_path,'model_functions.f'),'w').\\\n write(''.join(new_model_functions))\n return \n \n def setup_ward_check(self, working_dir, file_names, mp = False):\n \"\"\" Modify loop_matrix.f so to have one external massless gauge boson\n polarization vector turned into its momentum. 
It is not a pretty and \n flexible solution but it works for this particular case.\"\"\"\n \n shell_name = None\n directories = misc.glob('P0_*', working_dir)\n if directories and os.path.isdir(directories[0]):\n shell_name = os.path.basename(directories[0])\n \n dir_name = pjoin(working_dir, shell_name)\n \n # Look, in order, for all the possible file names provided.\n ind=0\n while ind<len(file_names) and not os.path.isfile(pjoin(dir_name,\n file_names[ind])):\n ind += 1\n if ind==len(file_names):\n raise Exception(\"No helas calls output file found.\")\n \n helas_file_name=pjoin(dir_name,file_names[ind])\n file = open(pjoin(dir_name,helas_file_name), 'r')\n \n helas_calls_out=\"\"\n original_file=\"\"\n gaugeVectorRegExp=re.compile(\\\n r\"CALL (MP\\_)?VXXXXX\\(P\\(0,(?P<p_id>\\d+)\\),((D)?CMPLX\\()?ZERO((,KIND\\=16)?\\))?,\"+\n r\"NHEL\\(\\d+\\),[\\+\\-]1\\*IC\\(\\d+\\),W\\(1,(?P<wf_id>\\d+(,H)?)\\)\\)\")\n foundGauge=False\n # Now we modify the first massless gauge vector wavefunction\n for line in file:\n helas_calls_out+=line\n original_file+=line\n if line.find(\"INCLUDE 'coupl.inc'\") != -1 or \\\n line.find(\"INCLUDE 'mp_coupl_same_name.inc'\") !=-1:\n helas_calls_out+=\" INTEGER WARDINT\\n\"\n if not foundGauge:\n res=gaugeVectorRegExp.search(line)\n if res!=None:\n foundGauge=True\n helas_calls_out+=\" DO WARDINT=1,4\\n\"\n helas_calls_out+=\" W(WARDINT+4,\"+res.group('wf_id')+\")=\"\n if not mp:\n helas_calls_out+=\\\n \"DCMPLX(P(WARDINT-1,\"+res.group('p_id')+\"),0.0D0)\\n\"\n else:\n helas_calls_out+=\"CMPLX(P(WARDINT-1,\"+\\\n res.group('p_id')+\"),0.0E0_16,KIND=16)\\n\"\n helas_calls_out+=\" ENDDO\\n\"\n file.close()\n \n return pjoin(dir_name,helas_file_name), original_file, helas_calls_out\n\n#===============================================================================\n# Helper class LoopMatrixElementEvaluator\n#===============================================================================\nclass LoopMatrixElementTimer(LoopMatrixElementEvaluator):\n \"\"\"Class taking care of matrix element evaluation and running timing for \n loop processes.\"\"\"\n\n def __init__(self, *args, **kwargs):\n \"\"\" Same as the mother for now \"\"\"\n LoopMatrixElementEvaluator.__init__(self,*args, **kwargs)\n \n @classmethod\n def get_MadLoop_Params(cls,MLCardPath):\n \"\"\" Return a dictionary of the parameter of the MadLoopParamCard.\n The key is the name of the parameter and the value is the corresponding\n string read from the card.\"\"\"\n \n return bannermod.MadLoopParam(MLCardPath)\n\n\n @classmethod\n def set_MadLoop_Params(cls,MLCardPath,params):\n \"\"\" Set the parameters in MadLoopParamCard to the values specified in\n the dictionary params.\n The key is the name of the parameter and the value is the corresponding\n string to write in the card.\"\"\"\n \n MLcard = bannermod.MadLoopParam(MLCardPath)\n for key,value in params.items():\n MLcard.set(key, value, changeifuserset=False)\n MLcard.write(MLCardPath, commentdefault=True)\n\n def skip_loop_evaluation_setup(self, dir_name, skip=True):\n \"\"\" Edit loop_matrix.f in order to skip the loop evaluation phase.\n Notice this only affects the double precision evaluation which is\n normally fine as we do not make the timing check on mp.\"\"\"\n\n file = open(pjoin(dir_name,'loop_matrix.f'), 'r')\n loop_matrix = file.read()\n file.close()\n \n file = open(pjoin(dir_name,'loop_matrix.f'), 'w')\n loop_matrix = re.sub(r\"SKIPLOOPEVAL=\\S+\\)\",\"SKIPLOOPEVAL=%s)\"%('.TRUE.' 
\n if skip else '.FALSE.'), loop_matrix)\n file.write(loop_matrix)\n file.close()\n\n def boot_time_setup(self, dir_name, bootandstop=True):\n \"\"\" Edit loop_matrix.f in order to set the flag which stops the\n execution after booting the program (i.e. reading the color data).\"\"\"\n\n file = open(pjoin(dir_name,'loop_matrix.f'), 'r')\n loop_matrix = file.read()\n file.close()\n \n file = open(pjoin(dir_name,'loop_matrix.f'), 'w') \n loop_matrix = re.sub(r\"BOOTANDSTOP=\\S+\\)\",\"BOOTANDSTOP=%s)\"%('.TRUE.' \n if bootandstop else '.FALSE.'), loop_matrix)\n file.write(loop_matrix)\n file.close()\n\n def setup_process(self, matrix_element, export_dir, reusing = False,\n param_card = None, MLOptions={},clean=True):\n \"\"\" Output the matrix_element in argument and perform the initialization\n while providing some details about the output in the dictionary returned. \n Returns None if anything fails\"\"\"\n \n infos={'Process_output': None,\n 'HELAS_MODEL_compilation' : None,\n 'dir_path' : None,\n 'Initialization' : None,\n 'Process_compilation' : None}\n\n if not reusing and clean:\n if os.path.isdir(export_dir):\n clean_up(self.output_path)\n if os.path.isdir(export_dir):\n raise InvalidCmd(\\\n \"The directory %s already exist. Please remove it.\"\\\n %str(export_dir))\n else:\n if not os.path.isdir(export_dir):\n raise InvalidCmd(\\\n \"Could not find the directory %s to reuse.\"%str(export_dir)) \n \n\n if not reusing and clean:\n model = matrix_element['processes'][0].get('model')\n # I do the import here because there is some cyclic import of export_v4\n # otherwise\n import madgraph.loop.loop_exporters as loop_exporters\n if self.loop_optimized_output:\n exporter_class=loop_exporters.LoopProcessOptimizedExporterFortranSA\n else:\n exporter_class=loop_exporters.LoopProcessExporterFortranSA\n \n MLoptions = {'clean': True, \n 'complex_mass': self.cmass_scheme,\n 'export_format':'madloop', \n 'mp':True,\n 'SubProc_prefix':'P',\n 'compute_color_flows':not matrix_element['processes'][0].get('has_born'),\n 'loop_dir': pjoin(self.mg_root,'Template','loop_material'),\n 'cuttools_dir': self.cuttools_dir,\n 'fortran_compiler':self.cmd.options['fortran_compiler'],\n 'output_dependencies':self.cmd.options['output_dependencies']}\n \n MLoptions.update(self.tir_dir)\n\n start=time.time()\n FortranExporter = exporter_class(export_dir, MLoptions)\n FortranModel = helas_call_writers.FortranUFOHelasCallWriter(model)\n FortranExporter.copy_template(model)\n FortranExporter.generate_subprocess_directory(matrix_element, FortranModel)\n wanted_lorentz = list(set(matrix_element.get_used_lorentz()))\n wanted_couplings = list(set([c for l in matrix_element.get_used_couplings() \\\n for c in l]))\n FortranExporter.convert_model(self.full_model,wanted_lorentz,wanted_couplings)\n infos['Process_output'] = time.time()-start\n start=time.time()\n FortranExporter.finalize(matrix_element,\"\",self.cmd.options, ['nojpeg'])\n infos['HELAS_MODEL_compilation'] = time.time()-start\n \n # Copy the parameter card if provided\n if param_card != None:\n if isinstance(param_card, str):\n cp(pjoin(param_card),\\\n pjoin(export_dir,'Cards','param_card.dat'))\n else:\n param_card.write(pjoin(export_dir,'Cards','param_card.dat'))\n \n # First Initialize filters (in later versions where this will hopefully\n # be done at generation time, then it will be able to skip it)\n MadLoopInitializer.fix_PSPoint_in_check(\n pjoin(export_dir,'SubProcesses'), read_ps = False, npoints = 4)\n\n 
self.fix_MadLoopParamCard(pjoin(export_dir,'Cards'),\n mp = False, loop_filter = True,MLOptions=MLOptions)\n \n shell_name = None\n directories = misc.glob('P0_*', pjoin(export_dir, 'SubProcesses'))\n if directories and os.path.isdir(directories[0]):\n shell_name = os.path.basename(directories[0])\n dir_name = pjoin(export_dir, 'SubProcesses', shell_name)\n infos['dir_path']=dir_name\n\n # Do not refresh the filter automatically as this is very often a waste\n # of time\n if not MadLoopInitializer.need_MadLoopInit(\n export_dir, subproc_prefix='P'):\n return infos\n\n attempts = [3,15]\n # remove check and check_sa.o for running initialization again\n try:\n os.remove(pjoin(dir_name,'check'))\n os.remove(pjoin(dir_name,'check_sa.o'))\n except OSError:\n pass\n\n nPS_necessary = MadLoopInitializer.run_initialization(dir_name,\n pjoin(export_dir,'SubProcesses'),infos,\\\n req_files = ['HelFilter.dat','LoopFilter.dat'],\n attempts = attempts)\n if attempts is None:\n logger.error(\"Could not compile the process %s,\"%shell_name+\\\n \" try to generate it via the 'generate' command.\")\n return None\n if nPS_necessary is None:\n logger.error(\"Could not initialize the process %s\"%shell_name+\\\n \" with %s PS points.\"%max(attempts))\n return None\n elif nPS_necessary > min(attempts):\n logger.warning(\"Could not initialize the process %s\"%shell_name+\\\n \" with %d PS points. It needed %d.\"%(min(attempts),nPS_necessary))\n\n return infos\n\n def time_matrix_element(self, matrix_element, reusing = False,\n param_card = None, keep_folder = False, options=None,\n MLOptions = {}):\n \"\"\" Output the matrix_element in argument and give detail information\n about the timing for its output and running\"\"\"\n\n # If True, then force three PS points only and skip the test on\n # unpolarized PS point \n make_it_quick=False\n\n if options and 'split_orders' in list(options.keys()):\n split_orders = options['split_orders']\n else:\n split_orders = -1\n\n assert ((not reusing and isinstance(matrix_element, \\\n helas_objects.HelasMatrixElement)) or (reusing and \n isinstance(matrix_element, base_objects.Process)))\n if not reusing:\n proc_name = matrix_element['processes'][0].shell_string()[2:]\n else:\n proc_name = matrix_element.shell_string()[2:]\n \n export_dir=pjoin(self.output_path,('SAVED' if keep_folder else '')+\\\n temp_dir_prefix+\"_%s\"%proc_name)\n\n res_timings = self.setup_process(matrix_element,export_dir, \\\n reusing, param_card,MLOptions = MLOptions,clean=True)\n \n if res_timings == None:\n return None\n dir_name=res_timings['dir_path']\n\n def check_disk_usage(path):\n return subprocess.Popen(\"du -shc -L \"+str(path), \\\n stdout=subprocess.PIPE, shell=True).communicate()[0].decode().split()[-2]\n # The above is compatible with python 2.6, not the neater version below\n # -> need to check if need .decode for python3.7\n #return subprocess.check_output([\"du -shc %s\"%path],shell=True).\\\n # split()[-2]\n\n res_timings['du_source']=check_disk_usage(pjoin(\\\n export_dir,'Source','*','*.f'))\n res_timings['du_process']=check_disk_usage(pjoin(dir_name,'*.f'))\n res_timings['du_color']=check_disk_usage(pjoin(dir_name,\n 'MadLoop5_resources','*.dat'))\n res_timings['du_exe']=check_disk_usage(pjoin(dir_name,'check'))\n\n if not res_timings['Initialization']==None:\n time_per_ps_estimate = (res_timings['Initialization']/4.0)/2.0\n elif make_it_quick:\n time_per_ps_estimate = -1.0 \n else:\n # We cannot estimate from the initialization, so we run just a 3\n # PS point run to evaluate 
it.\n MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),\n read_ps = False, npoints = 3, hel_config = -1, \n split_orders=split_orders)\n compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)\n time_per_ps_estimate = run_time/3.0\n \n self.boot_time_setup(dir_name,bootandstop=True)\n compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)\n res_timings['Booting_time'] = run_time\n self.boot_time_setup(dir_name,bootandstop=False)\n\n # Detect one contributing helicity\n contributing_hel=0\n n_contrib_hel=0\n proc_prefix_file = open(pjoin(dir_name,'proc_prefix.txt'),'r')\n proc_prefix = proc_prefix_file.read()\n proc_prefix_file.close()\n helicities = open(pjoin(dir_name,'MadLoop5_resources',\n '%sHelFilter.dat'%proc_prefix)).read().split()\n for i, hel in enumerate(helicities):\n if (self.loop_optimized_output and int(hel)>-10000) or hel=='T':\n if contributing_hel==0:\n contributing_hel=i+1\n n_contrib_hel += 1\n \n if contributing_hel==0:\n logger.error(\"Could not find a contributing helicity \"+\\\n \"configuration for process %s.\"%proc_name)\n return None\n \n res_timings['n_contrib_hel']=n_contrib_hel\n res_timings['n_tot_hel']=len(helicities)\n \n # We aim at a 30 sec run\n if not make_it_quick:\n target_pspoints_number = max(int(30.0/time_per_ps_estimate)+1,50)\n else:\n target_pspoints_number = 10\n \n logger.info(\"Checking timing for process %s \"%proc_name+\\\n \"with %d PS points.\"%target_pspoints_number)\n \n MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),\n read_ps = False, npoints = target_pspoints_number*2, \\\n hel_config = contributing_hel, split_orders=split_orders)\n compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)\n \n if compile_time == None: return None\n \n res_timings['run_polarized_total']=\\\n (run_time-res_timings['Booting_time'])/(target_pspoints_number*2)\n\n if make_it_quick:\n res_timings['run_unpolarized_total'] = 1.0\n res_timings['ram_usage'] = 0.0\n else:\n MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),\n read_ps = False, npoints = target_pspoints_number, hel_config = -1,\n split_orders=split_orders)\n compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name, \n checkRam=True)\n\n if compile_time == None: return None\n res_timings['run_unpolarized_total']=\\\n (run_time-res_timings['Booting_time'])/target_pspoints_number\n res_timings['ram_usage'] = ram_usage \n \n if not self.loop_optimized_output:\n return res_timings\n \n # For the loop optimized output, we also check the time spent in\n # computing the coefficients of the loop numerator polynomials.\n \n # So we modify loop_matrix.f in order to skip the loop evaluation phase.\n self.skip_loop_evaluation_setup(dir_name,skip=True)\n\n if make_it_quick:\n res_timings['run_unpolarized_coefs'] = 1.0\n else:\n MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),\n read_ps = False, npoints = target_pspoints_number, hel_config = -1,\n split_orders=split_orders)\n compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)\n if compile_time == None: return None\n res_timings['run_unpolarized_coefs']=\\\n (run_time-res_timings['Booting_time'])/target_pspoints_number\n \n MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),\n read_ps = False, npoints = target_pspoints_number*2, \\\n hel_config = contributing_hel, split_orders=split_orders)\n compile_time, run_time, ram_usage = 
MadLoopInitializer.make_and_run(dir_name)\n if compile_time == None: return None\n res_timings['run_polarized_coefs']=\\\n (run_time-res_timings['Booting_time'])/(target_pspoints_number*2) \n\n # Restore the original file.\n self.skip_loop_evaluation_setup(dir_name,skip=False)\n \n return res_timings\n\n#===============================================================================\n# Helper method check_matrix_element_stability\n#===============================================================================\n\n def check_matrix_element_stability(self, matrix_element,options=None,\n infos_IN = None, param_card = None, keep_folder = False,\n MLOptions = {}):\n \"\"\" Output the matrix_element in argument, run it for nPoints and return\n a dictionary containing the stability information on each of these points.\n If infos are provided, then the matrix element output is skipped and\n reused from a previous run, via the content of infos.\n \"\"\"\n \n if not options:\n reusing = False\n nPoints = 100\n split_orders = -1\n else:\n reusing = options['reuse']\n nPoints = options['npoints']\n split_orders = options['split_orders']\n \n assert ((not reusing and isinstance(matrix_element, \\\n helas_objects.HelasMatrixElement)) or (reusing and \n isinstance(matrix_element, base_objects.Process))) \n\n # Helper functions\n def format_PS_point(ps, rotation=0):\n \"\"\" Write out the specified PS point to the file dir_path/PS.input\n while rotating it if rotation!=0. We consider only a few fixed\n transformations, but one could think of rotations by an arbitrary\n angle too. The first possibilities (1, 2 and 3) are rotations and a\n boost along the z-axis, so that improve_ps can still work.\n rotation=0 => No rotation\n rotation=1 => Z-axis pi/2 rotation\n rotation=2 => Z-axis pi/4 rotation\n rotation=3 => Z-axis boost \n rotation=4 => (x'=z,y'=-x,z'=-y)\n rotation=5 => (x'=-z,y'=y,z'=x)\"\"\"\n if rotation==0:\n p_out=copy.copy(ps)\n elif rotation==1:\n p_out = [[pm[0],-pm[2],pm[1],pm[3]] for pm in ps]\n elif rotation==2:\n sq2 = math.sqrt(2.0)\n p_out = [[pm[0],(pm[1]-pm[2])/sq2,(pm[1]+pm[2])/sq2,pm[3]] for pm in ps]\n elif rotation==3:\n p_out = boost_momenta(ps, 3) \n # From this point on, the transformations will prevent the\n # improve_ps script of MadLoop from working. \n elif rotation==4:\n p_out=[[pm[0],pm[3],-pm[1],-pm[2]] for pm in ps]\n elif rotation==5:\n p_out=[[pm[0],-pm[3],pm[2],pm[1]] for pm in ps]\n else:\n raise MadGraph5Error(\"Rotation id %i not implemented\"%rotation)\n \n return '\\n'.join([' '.join(['%.16E'%pi for pi in p]) for p in p_out])\n \n 
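# Worked illustration (added note): with rotation=1 a momentum p=[E,px,py,pz]\n # is mapped to [E,-py,px,pz], i.e. a pi/2 rotation around the z-axis. Since\n # the matrix element must be invariant under such Lorentz transformations,\n # each rotation provides an independent numerical evaluation of the same\n # kinematic configuration, which is what the stability test compares.\n 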
def pick_PS_point(proc, options):\n \"\"\" Randomly generate a PS point and make sure it is eligible, then\n return it. Users can edit the cuts here if they want.\"\"\"\n\n p, w_rambo = self.get_momenta(proc, options)\n if options['events']:\n return p\n # For a 2>1 process, we of course don't check the cuts\n while (not MatrixElementEvaluator.pass_isolation_cuts(p) and len(p)>3):\n p, w_rambo = self.get_momenta(proc, options)\n \n # For a 2>1 process, it would always be the same PS point,\n # so here we bring in a boost along the z-axis, just for the sake\n # of it.\n if len(p)==3:\n p = boost_momenta(p,3,random.uniform(0.0,0.99))\n return p\n \n # Start loop on loop libraries \n # Accuracy threshold of double precision evaluations above which the\n # PS point is also evaluated in quadruple precision\n accuracy_threshold=1.0e-1\n \n # Number of Lorentz transformations to consider for the stability test\n # (along with the loop direction test which is performed by default)\n num_rotations = 1\n \n if \"MLReductionLib\" not in MLOptions:\n tools=[1]\n else:\n tools=MLOptions[\"MLReductionLib\"]\n tools=list(set(tools)) # remove duplicates\n \n # TIR libraries which are not self-contained\n tool_var={'pjfry':2,'golem':4,'samurai':5,'ninja':6,'collier':7}\n for tool in ['pjfry','golem','samurai','ninja','collier']:\n tool_dir='%s_dir'%tool\n if not tool_dir in self.tir_dir:\n continue\n tool_libpath=self.tir_dir[tool_dir]\n tool_libname=\"lib%s.a\"%tool\n if (not isinstance(tool_libpath,str)) or (not os.path.exists(tool_libpath)) \\\n or (not os.path.isfile(pjoin(tool_libpath,tool_libname))):\n if tool_var[tool] in tools:\n tools.remove(tool_var[tool])\n if not tools:\n return None\n \n # Normally, this should work for loop-induced processes as well\n if not reusing:\n process = matrix_element['processes'][0]\n else:\n process = matrix_element\n proc_name = process.shell_string()[2:]\n export_dir=pjoin(self.mg_root,(\"SAVED\" if keep_folder else \"\")+\\\n temp_dir_prefix+\"_%s\"%proc_name)\n \n tools_name=bannermod.MadLoopParam._ID_reduction_tool_map\n \n return_dict={}\n return_dict['Stability']={}\n infos_save={'Process_output': None,\n 'HELAS_MODEL_compilation' : None,\n 'dir_path' : None,\n 'Initialization' : None,\n 'Process_compilation' : None} \n \n for tool in tools:\n tool_name=tools_name[tool]\n # Each evaluation is performed in different ways to assess its stability.\n # There are two dictionaries, one for the double precision evaluations\n # and a second one for quadruple precision (if it was needed).\n # The keys are the names of the evaluation methods and the values are\n # the floats returned.\n DP_stability = []\n QP_stability = []\n # The unstable points encountered are stored in this list\n Unstable_PS_points = []\n # The exceptional PS points are those which stay unstable in quad prec.\n Exceptional_PS_points = []\n \n MLoptions=MLOptions\n MLoptions[\"MLReductionLib\"]=tool\n clean = (tool==tools[0]) and not nPoints==0\n if infos_IN==None or (tool_name not in infos_IN):\n infos=infos_IN\n else:\n infos=infos_IN[tool_name]\n\n if not infos:\n infos = self.setup_process(matrix_element,export_dir, \\\n reusing, param_card,MLoptions,clean)\n if not infos:\n return None\n \n if clean:\n infos_save['Process_output']=infos['Process_output']\n infos_save['HELAS_MODEL_compilation']=infos['HELAS_MODEL_compilation']\n infos_save['dir_path']=infos['dir_path']\n infos_save['Process_compilation']=infos['Process_compilation']\n else:\n if not infos['Process_output']:\n infos['Process_output']=infos_save['Process_output']\n if not infos['HELAS_MODEL_compilation']:\n infos['HELAS_MODEL_compilation']=infos_save['HELAS_MODEL_compilation']\n if not infos['dir_path']:\n infos['dir_path']=infos_save['dir_path']\n if not infos['Process_compilation']:\n infos['Process_compilation']=infos_save['Process_compilation']\n \n 
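# Note (added): the entries cached in infos_save during the first tool's\n # setup are re-injected here for the subsequent tools, since the process\n # output and its compilation are only performed once for all of them.\n 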
dir_path=infos['dir_path']\n\n # Reuse old stability runs if present\n savefile='SavedStabilityRun_%s%%s.pkl'%tools_name[tool]\n data_i = 0\n \n if reusing:\n # Possibly load additional data sets beyond the main one stored in slot 0\n data_i=0\n while os.path.isfile(pjoin(dir_path,savefile%('_%d'%data_i))):\n pickle_path = pjoin(dir_path,savefile%('_%d'%data_i))\n saved_run = save_load_object.load_from_file(pickle_path)\n if data_i>0:\n logger.info(\"Loading additional data stored in %s.\"%\n str(pickle_path))\n logger.info(\"Loaded data moved to %s.\"%str(pjoin(\n dir_path,'LOADED_'+savefile%('_%d'%data_i))))\n shutil.move(pickle_path,\n pjoin(dir_path,'LOADED_'+savefile%('_%d'%data_i)))\n DP_stability.extend(saved_run['DP_stability'])\n QP_stability.extend(saved_run['QP_stability'])\n Unstable_PS_points.extend(saved_run['Unstable_PS_points'])\n Exceptional_PS_points.extend(saved_run['Exceptional_PS_points'])\n data_i += 1\n \n return_dict['Stability'][tool_name] = {'DP_stability':DP_stability,\n 'QP_stability':QP_stability,\n 'Unstable_PS_points':Unstable_PS_points,\n 'Exceptional_PS_points':Exceptional_PS_points}\n\n if nPoints==0:\n if len(return_dict['Stability'][tool_name]['DP_stability'])!=0:\n # In case some data was combined, overwrite the pickle\n if data_i>1:\n save_load_object.save_to_file(pjoin(dir_path,\n savefile%'_0'),return_dict['Stability'][tool_name])\n continue\n else:\n logger.info(\"ERROR: Not reusing a directory or any pickled\"+\n \" result for tool %s and the number\"%tool_name+\\\n \" of points for the check is zero.\")\n return None\n\n logger.info(\"Checking stability of process %s \"%proc_name+\\\n \"with %d PS points by %s.\"%(nPoints,tool_name))\n if infos['Initialization'] != None:\n time_per_ps_estimate = (infos['Initialization']/4.0)/2.0\n sec_needed = int(time_per_ps_estimate*nPoints*4)\n else:\n sec_needed = 0\n \n 
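# Arithmetic note (added): the estimate above assumes initialization spent\n # its time on 4 PS points evaluated twice each, e.g. Initialization=8s\n # gives time_per_ps_estimate=1s, so nPoints=100 leads to an announced\n # running time of about int(1*100*4)=400 seconds for the stability scan.\n 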
progress_bar = None\n time_info = False\n if sec_needed>5:\n time_info = True\n logger.info(\"This check should take about \"+\\\n \"%s to run. Started on %s.\"%(\\\n str(datetime.timedelta(seconds=sec_needed)),\\\n datetime.datetime.now().strftime(\"%d-%m-%Y %H:%M\")))\n if logger.getEffectiveLevel()<logging.WARNING and \\\n (sec_needed>5 or infos['Initialization'] == None):\n widgets = ['Stability check:', pbar.Percentage(), ' ', \n pbar.Bar(),' ', pbar.ETA(), ' ']\n progress_bar = pbar.ProgressBar(widgets=widgets, maxval=nPoints, \n fd=sys.stdout)\n MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),\n read_ps = True, npoints = 1, hel_config = -1, split_orders=split_orders)\n # Recompile (Notice that the recompilation is only necessary once) for\n # the change above to take effect.\n # Make sure to recreate the executable and modified sources\n try:\n os.remove(pjoin(dir_path,'check'))\n os.remove(pjoin(dir_path,'check_sa.o'))\n except OSError:\n pass\n # Now run make\n devnull = open(os.devnull, 'w')\n retcode = subprocess.call(['make','check'],\n cwd=dir_path, stdout=devnull, stderr=devnull)\n devnull.close() \n if retcode != 0:\n logging.error(\"Error while executing make in %s\" % dir_path)\n return None\n \n\n # First create the stability check fortran driver executable if not\n # already present.\n if not os.path.isfile(pjoin(dir_path,'StabilityCheckDriver.f')):\n # Use the presence of the file born_matrix.f to check if this output\n # is a loop_induced one or not.\n if os.path.isfile(pjoin(dir_path,'born_matrix.f')):\n checkerName = 'StabilityCheckDriver.f'\n else:\n checkerName = 'StabilityCheckDriver_loop_induced.f'\n\n with open(pjoin(self.mg_root,'Template','loop_material','Checks',\n checkerName),'r') as checkerFile:\n with open(pjoin(dir_path,'proc_prefix.txt')) as proc_prefix:\n checkerToWrite = checkerFile.read()%{'proc_prefix':\n proc_prefix.read()}\n checkerFile = open(pjoin(dir_path,'StabilityCheckDriver.f'),'w')\n checkerFile.write(checkerToWrite)\n checkerFile.close() \n #cp(pjoin(self.mg_root,'Template','loop_material','Checks',\\\n # checkerName),pjoin(dir_path,'StabilityCheckDriver.f'))\n \n # Make sure to recompile the possibly modified files (time stamps can be\n # off).\n if os.path.isfile(pjoin(dir_path,'StabilityCheckDriver')):\n os.remove(pjoin(dir_path,'StabilityCheckDriver'))\n if os.path.isfile(pjoin(dir_path,'loop_matrix.o')):\n os.remove(pjoin(dir_path,'loop_matrix.o'))\n misc.compile(arg=['StabilityCheckDriver'], cwd=dir_path, \\\n mode='fortran', job_specs = False)\n\n # Now for 2>1 processes, because the HelFilter was set up with always\n # identical PS points with vec(p_1)=-vec(p_2), it is best not to remove\n # the helicity filter double check\n if len(process['legs'])==3:\n self.fix_MadLoopParamCard(dir_path, mp=False,\n loop_filter=False, DoubleCheckHelicityFilter=True)\n\n StabChecker = subprocess.Popen([pjoin(dir_path,'StabilityCheckDriver')], \n stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \n cwd=dir_path, bufsize=0)\n start_index = len(DP_stability)\n if progress_bar!=None:\n progress_bar.start()\n\n # Flag to know if the run was interrupted or not\n interrupted = False\n # Flag to know whether the run for one specific PS point got an IOError\n # and must be retried\n retry = 0\n # We do not use a for loop because we want to manipulate the updater.\n i=start_index\n if options and 'events' in options and options['events']:\n # it is necessary to reuse the events from the lhe file\n import MadSpin.decay as madspin\n fsock = open(options['events'])\n self.event_file = madspin.Event(fsock)\n while i<(start_index+nPoints):\n # To be added to the 
returned statistics \n qp_dict={}\n dp_dict={}\n UPS = None\n EPS = None\n # Pick an eligible PS point with rambo, if not already done\n if retry==0:\n p = pick_PS_point(process, options)\n# print \"I use P_%i=\"%i,p\n try:\n if progress_bar!=None:\n progress_bar.update(i+1-start_index)\n # Write it in the input file\n PSPoint = format_PS_point(p,0)\n dp_res=[]\n dp_res.append(self.get_me_value(StabChecker,PSPoint,1,\n split_orders=split_orders))\n dp_dict['CTModeA']=dp_res[-1]\n dp_res.append(self.get_me_value(StabChecker,PSPoint,2,\n split_orders=split_orders))\n dp_dict['CTModeB']=dp_res[-1]\n for rotation in range(1,num_rotations+1):\n PSPoint = format_PS_point(p,rotation)\n dp_res.append(self.get_me_value(StabChecker,PSPoint,1,\n split_orders=split_orders))\n dp_dict['Rotation%i'%rotation]=dp_res[-1]\n # Make sure all results make sense\n if any([not res for res in dp_res]):\n return None\n dp_accuracy =((max(dp_res)-min(dp_res))/\n abs(sum(dp_res)/len(dp_res)))\n dp_dict['Accuracy'] = dp_accuracy\n if dp_accuracy>accuracy_threshold:\n if tool in [1,6]:\n # Only CutTools or Ninja can use QP\n UPS = [i,p]\n qp_res=[]\n PSPoint = format_PS_point(p,0)\n qp_res.append(self.get_me_value(StabChecker,PSPoint,4,\n split_orders=split_orders))\n qp_dict['CTModeA']=qp_res[-1]\n qp_res.append(self.get_me_value(StabChecker,PSPoint,5,\n split_orders=split_orders))\n qp_dict['CTModeB']=qp_res[-1]\n for rotation in range(1,num_rotations+1):\n PSPoint = format_PS_point(p,rotation)\n qp_res.append(self.get_me_value(StabChecker,PSPoint,4,\n split_orders=split_orders))\n qp_dict['Rotation%i'%rotation]=qp_res[-1]\n # Make sure all results make sense\n if any([not res for res in qp_res]):\n return None\n \n qp_accuracy = ((max(qp_res)-min(qp_res))/\n abs(sum(qp_res)/len(qp_res)))\n qp_dict['Accuracy']=qp_accuracy\n if qp_accuracy>accuracy_threshold:\n EPS = [i,p]\n else:\n # Simply consider the point as a UPS when not using\n # CutTools\n UPS = [i,p]\n\n except KeyboardInterrupt:\n interrupted = True\n break\n except IOError as e:\n if e.errno == errno.EINTR:\n if retry==100:\n logger.error(\"Failed hundred times consecutively because\"+\n \" of system call interruptions.\")\n raise\n else:\n logger.debug(\"Recovered from a system call interruption.\"+\\\n \"PSpoint #%i, Attempt #%i.\"%(i,retry+1))\n # Sleep for half a second. 
Safety measure.\n time.sleep(0.5) \n # We will retry this PS point\n retry = retry+1\n # Make sure the MadLoop process is properly killed\n try:\n StabChecker.kill()\n except Exception: \n pass\n StabChecker = subprocess.Popen(\\\n [pjoin(dir_path,'StabilityCheckDriver')], \n stdin=subprocess.PIPE, stdout=subprocess.PIPE, \n stderr=subprocess.PIPE, cwd=dir_path, bufsize=0)\n continue\n else:\n raise\n \n # Successfully processed a PS point so,\n # > reset retry\n retry = 0\n # > Update the while loop counter variable\n i=i+1\n \n # Update the returned statistics\n DP_stability.append(dp_dict)\n QP_stability.append(qp_dict)\n if not EPS is None:\n Exceptional_PS_points.append(EPS)\n if not UPS is None:\n Unstable_PS_points.append(UPS)\n\n if progress_bar!=None:\n progress_bar.finish()\n if time_info:\n logger.info('Finished check on %s.'%datetime.datetime.now().strftime(\\\n \"%d-%m-%Y %H:%M\"))\n\n # Close the StabChecker process.\n if not interrupted:\n StabChecker.stdin.write('y\\n'.encode())\n else:\n StabChecker.kill()\n \n #return_dict = {'DP_stability':DP_stability,\n # 'QP_stability':QP_stability,\n # 'Unstable_PS_points':Unstable_PS_points,\n # 'Exceptional_PS_points':Exceptional_PS_points}\n \n # Save the run for possible future use\n save_load_object.save_to_file(pjoin(dir_path,savefile%'_0'),\\\n return_dict['Stability'][tool_name])\n\n if interrupted:\n break\n \n return_dict['Process'] = matrix_element.get('processes')[0] if not \\\n reusing else matrix_element\n return return_dict\n\n @classmethod\n def get_me_value(cls, StabChecker, PSpoint, mode, hel=-1, mu_r=-1.0,\n split_orders=-1):\n \"\"\" This version of get_me_value is simplified for the purpose of this\n class. No compilation is necessary. The CT mode can be specified.\"\"\"\n\n # Reset the stdin with EOF character without closing it.\n StabChecker.stdin.write('\\x1a'.encode())\n StabChecker.stdin.write('1\\n'.encode())\n StabChecker.stdin.write(('%d\\n'%mode).encode()) \n StabChecker.stdin.write(('%s\\n'%PSpoint).encode())\n StabChecker.stdin.write(('%.16E\\n'%mu_r).encode()) \n StabChecker.stdin.write(('%d\\n'%hel).encode())\n StabChecker.stdin.write(('%d\\n'%split_orders).encode())\n \n\n try:\n #fsock = open('/tmp/log', 'w')\n while True:\n output = StabChecker.stdout.readline().decode()\n #fsock.write(output)\n if output != '':\n last_non_empty = output\n if output==' ##TAG#RESULT_START#TAG##\\n':\n break\n # Break if the checker has crashed for some reason.\n ret_code = StabChecker.poll()\n if not ret_code is None:\n output = StabChecker.stdout.readline().decode()\n if output != '':\n last_non_empty = output\n error = StabChecker.stderr.readline().decode()\n raise MadGraph5Error(\"The MadLoop stability checker crashed with return code = %d, and last output:\\n\\nstdout: %s\\nstderr: %s\\n\"%\\\n (ret_code, last_non_empty, error))\n \n res = \"\"\n while True:\n output = StabChecker.stdout.readline().decode()\n if output != '':\n last_non_empty = output\n if str(output)==' ##TAG#RESULT_STOP#TAG##\\n':\n break\n else:\n res += output\n ret_code = StabChecker.poll() \n if not ret_code is None:\n output = StabChecker.stdout.readline().decode()\n if output != '':\n last_non_empty = output\n error = StabChecker.stderr.readline().decode()\n raise MadGraph5Error(\"The MadLoop stability checker crashed with return code = %d, and last output:\\n\\nstdout: %s\\nstderr: %s\\n\"%\\\n (ret_code, last_non_empty, error))\n\n return cls.parse_check_output(res,format='tuple')[0][0]\n except IOError as e:\n 
logging.warning(\"Error while running MadLoop. Exception = %s\"%str(e))\n raise e \n\ndef evaluate_helicities(process, param_card = None, mg_root=\"\", \n cmass_scheme = False):\n \"\"\" Perform a python evaluation of the matrix element independently for\n all possible helicity configurations for a fixed number of points N and \n returns the average for each in the format [[hel_config, eval],...].\n This is used to determine what are the vanishing and dependent helicity \n configurations at generation time and accordingly setup the output.\n This is not yet implemented at LO.\"\"\"\n \n # Make sure this function is employed with a single process at LO\n assert isinstance(process,base_objects.Process)\n assert process.get('perturbation_couplings')==[]\n \n N_eval=50\n \n evaluator = MatrixElementEvaluator(process.get('model'), param_card,\n auth_skipping = False, reuse = True)\n \n amplitude = diagram_generation.Amplitude(process)\n matrix_element = helas_objects.HelasMatrixElement(amplitude,gen_color=False)\n \n cumulative_helEvals = []\n # Fill cumulative hel progressively with several evaluations of the ME.\n for i in range(N_eval):\n p, w_rambo = evaluator.get_momenta(process) \n helEvals = evaluator.evaluate_matrix_element(\\\n matrix_element, p = p, output = 'helEvals')['helEvals']\n if cumulative_helEvals==[]:\n cumulative_helEvals=copy.copy(helEvals)\n else:\n cumulative_helEvals = [[h[0],h[1]+helEvals[i][1]] for i, h in \\\n enumerate(cumulative_helEvals)]\n \n # Now normalize with the total number of evaluations\n cumulative_helEvals = [[h[0],h[1]/N_eval] for h in cumulative_helEvals]\n \n # As we are not in the context of a check command, so we clean the added\n # globals right away\n clean_added_globals(ADDED_GLOBAL)\n \n return cumulative_helEvals\n \ndef run_multiprocs_no_crossings(function, multiprocess, stored_quantities,\n opt=None, options=None):\n \"\"\"A wrapper function for running an iteration of a function over\n a multiprocess, without having to first create a process list\n (which makes a big difference for very large multiprocesses.\n stored_quantities is a dictionary for any quantities that we want\n to reuse between runs.\"\"\"\n \n model = multiprocess.get('model')\n isids = [leg.get('ids') for leg in multiprocess.get('legs') \\\n if not leg.get('state')]\n fsids = [leg.get('ids') for leg in multiprocess.get('legs') \\\n if leg.get('state')]\n # Create dictionary between isids and antiids, to speed up lookup\n id_anti_id_dict = {}\n for id in set(tuple(sum(isids+fsids, []))):\n id_anti_id_dict[id] = model.get_particle(id).get_anti_pdg_code()\n id_anti_id_dict[model.get_particle(id).get_anti_pdg_code()] = id \n sorted_ids = []\n results = []\n for is_prod in itertools.product(*isids):\n for fs_prod in itertools.product(*fsids):\n\n # Check if we have already checked the process\n if check_already_checked(is_prod, fs_prod, sorted_ids,\n multiprocess, model, id_anti_id_dict):\n continue\n # Generate process based on the selected ids\n process = multiprocess.get_process_with_legs(base_objects.LegList(\\\n [base_objects.Leg({'id': id, 'state':False}) for \\\n id in is_prod] + \\\n [base_objects.Leg({'id': id, 'state':True}) for \\\n id in fs_prod]))\n\n if opt is not None:\n if isinstance(opt, dict):\n try:\n value = opt[process.base_string()]\n except Exception:\n continue\n result = function(process, stored_quantities, value, options=options)\n else:\n result = function(process, stored_quantities, opt, options=options)\n else:\n result = function(process, 
stored_quantities, options=options)\n \n if result:\n results.append(result)\n\n return results\n\n#===============================================================================\n# Helper function check_already_checked\n#===============================================================================\n\ndef check_already_checked(is_ids, fs_ids, sorted_ids, process, model,\n id_anti_id_dict = {}):\n \"\"\"Check if the process has already been checked; if so return True,\n otherwise add the process to sorted_ids.\"\"\"\n\n # Check if process is already checked\n if id_anti_id_dict:\n is_ids = [id_anti_id_dict[id] for id in \\\n is_ids]\n else:\n is_ids = [model.get_particle(id).get_anti_pdg_code() for id in \\\n is_ids] \n\n ids = array.array('i', sorted(is_ids + list(fs_ids)) + \\\n [process.get('id')])\n\n if ids in sorted_ids:\n # We have already checked (a crossing of) this process\n return True\n\n # Add this process to the list of already-checked processes\n sorted_ids.append(ids)\n\n # Do not add the antiprocess here, since it might be relevant too\n return False\n\n#===============================================================================\n# Generate a loop matrix element\n#===============================================================================\ndef generate_loop_matrix_element(process_definition, reuse, output_path=None,\n cmd = FakeInterface(), proc_name=None, loop_filter=None):\n \"\"\" Generate a loop matrix element from the process definition, and return\n it along with the timing information dictionary.\n If reuse is True, it reuses the already-output directory if found.\n The proc_name can optionally be specified.\"\"\"\n\n assert isinstance(process_definition,\n (base_objects.ProcessDefinition,base_objects.Process))\n assert process_definition.get('perturbation_couplings')!=[]\n\n if isinstance(process_definition,base_objects.ProcessDefinition):\n if any(len(l.get('ids'))>1 for l in process_definition.get('legs')):\n raise InvalidCmd(\"This check can only be performed on single \"+\n \"processes (i.e. without multiparticle labels).\")\n \n 
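# Illustration (added, hypothetical process): for d d~ > u u~ the two list\n # comprehensions below give isids=[1,-1] and fsids=[2,-2], i.e. exactly one\n # PDG code per leg, which is why multiparticle labels are rejected above.\n 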
isids = [leg.get('ids')[0] for leg in process_definition.get('legs') \\\n if not leg.get('state')]\n fsids = [leg.get('ids')[0] for leg in process_definition.get('legs') \\\n if leg.get('state')]\n \n # Now generate a process based on the ProcessDefinition given in argument.\n process = process_definition.get_process(isids,fsids)\n else:\n process = process_definition\n \n if output_path is not None:\n root_path = output_path\n else:\n root_path = cmd._mgme_dir\n # By default, set all entries to None\n timing = {'Diagrams_generation': None,\n 'n_loops': None,\n 'HelasDiagrams_generation': None,\n 'n_loop_groups': None,\n 'n_loop_wfs': None,\n 'loop_wfs_ranks': None}\n \n if proc_name:\n proc_dir = pjoin(root_path,proc_name)\n else:\n proc_dir = pjoin(root_path,\"SAVED\"+temp_dir_prefix+\"_%s\"%(\n '_'.join(process.shell_string().split('_')[1:])))\n if reuse and os.path.isdir(proc_dir):\n logger.info(\"Reusing directory %s\"%str(proc_dir))\n # If reusing, return the process instead of the matrix element\n return timing, process\n \n logger.info(\"Generating p%s\"%process_definition.nice_string()[1:])\n\n start=time.time()\n try:\n amplitude = loop_diagram_generation.LoopAmplitude(process,\n loop_filter=loop_filter)\n except InvalidCmd:\n # An error about the sanity of the process can be thrown, in which case\n # we return nothing\n return time.time()-start, None \n if not amplitude.get('diagrams'):\n # No matrix element for this process\n return time.time()-start, None\n\n # Read the loop_optimized_output option from the interface options\n loop_optimized_output = cmd.options['loop_optimized_output']\n timing['Diagrams_generation']=time.time()-start\n timing['n_loops']=len(amplitude.get('loop_diagrams'))\n start=time.time()\n \n matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,\n optimized_output = loop_optimized_output,gen_color=True)\n # Here, the alohaModel used for analytic computations and for the aloha\n # subroutine output will be different, so that some optimization is lost.\n # But that is ok for the check functionality.\n matrix_element.compute_all_analytic_information()\n timing['HelasDiagrams_generation']=time.time()-start\n \n if loop_optimized_output:\n timing['n_loop_groups']=len(matrix_element.get('loop_groups'))\n lwfs=[l for ldiag in matrix_element.get_loop_diagrams() for l in \\\n ldiag.get('loop_wavefunctions')]\n timing['n_loop_wfs']=len(lwfs)\n timing['loop_wfs_ranks']=[]\n for rank in range(0,max([l.get_analytic_info('wavefunction_rank') \\\n for l in lwfs])+1):\n timing['loop_wfs_ranks'].append(\\\n len([1 for l in lwfs if \\\n l.get_analytic_info('wavefunction_rank')==rank]))\n\n return timing, matrix_element\n\n#===============================================================================\n# check profile for loop process (timings + stability in one go)\n#===============================================================================\ndef check_profile(process_definition, param_card = None,cuttools=\"\",tir={},\n options = {}, cmd = FakeInterface(),output_path=None,MLOptions={}):\n \"\"\"For a single loop process, check both its timings and then its stability\n in one go without regenerating it.\"\"\"\n\n if 'reuse' not in options:\n keep_folder=False\n else:\n keep_folder = options['reuse']\n\n model=process_definition.get('model')\n\n timing1, matrix_element = generate_loop_matrix_element(process_definition,\n keep_folder,output_path=output_path,cmd=cmd)\n reusing = isinstance(matrix_element, base_objects.Process)\n 
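# Note (added): generate_loop_matrix_element returns the bare Process object\n # instead of a HelasMatrixElement when an existing output directory is\n # reused, so this isinstance test doubles as a 'directory was reused' flag.\n 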
options['reuse'] = reusing\n myProfiler = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,\n model=model, output_path=output_path, cmd=cmd)\n\n if not myProfiler.loop_optimized_output:\n MLoptions={}\n else:\n MLoptions=MLOptions\n\n timing2 = myProfiler.time_matrix_element(matrix_element, reusing, \n param_card, keep_folder=keep_folder,options=options,\n MLOptions = MLoptions) \n \n if timing2 == None:\n return None, None\n\n timing2['reduction_tool'] = MLoptions['MLReductionLib'][0]\n\n # The timing info is made of the two merged dictionaries\n timing = dict(list(timing1.items())+list(timing2.items()))\n stability = myProfiler.check_matrix_element_stability(matrix_element, \n options=options, infos_IN=timing,param_card=param_card,\n keep_folder = keep_folder,\n MLOptions = MLoptions)\n if stability == None:\n return None, None\n else:\n timing['loop_optimized_output']=myProfiler.loop_optimized_output\n stability['loop_optimized_output']=myProfiler.loop_optimized_output\n return timing, stability\n\n#===============================================================================\n# check_stability for loop processes\n#===============================================================================\ndef check_stability(process_definition, param_card = None,cuttools=\"\",tir={}, \n options=None,nPoints=100, output_path=None,\n cmd = FakeInterface(), MLOptions = {}):\n \"\"\"For a single loop process, check the numerical stability of its loop\n matrix element evaluations and return a detailed summary.\"\"\"\n\n if \"reuse\" in options:\n reuse=options['reuse']\n else:\n reuse=False\n\n keep_folder = reuse\n model=process_definition.get('model')\n\n timing, matrix_element = generate_loop_matrix_element(process_definition,\n reuse, output_path=output_path, cmd=cmd)\n reusing = isinstance(matrix_element, base_objects.Process)\n options['reuse'] = reusing\n myStabilityChecker = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,\n output_path=output_path,model=model,cmd=cmd)\n \n if not myStabilityChecker.loop_optimized_output:\n MLoptions = {}\n else:\n MLoptions = MLOptions\n # Make sure that the poles computation is disabled for COLLIER\n if 'COLLIERComputeUVpoles' not in MLoptions:\n MLoptions['COLLIERComputeUVpoles']=False\n if 'COLLIERComputeIRpoles' not in MLoptions:\n MLoptions['COLLIERComputeIRpoles']=False\n # Require a high accuracy from COLLIER if not specified\n if 'COLLIERRequiredAccuracy' not in MLoptions:\n MLoptions['COLLIERRequiredAccuracy']=1e-13\n # Use loop-direction switching as stability test if not specified (more reliable)\n if 'COLLIERUseInternalStabilityTest' not in MLoptions:\n MLoptions['COLLIERUseInternalStabilityTest']=False\n # Finally we *must* forbid the use of the COLLIER global cache here, because\n # it does not work with the way we independently call CTModeRun 1 and 2\n # with the StabilityChecker.\n MLoptions['COLLIERGlobalCache'] = 0\n\n if \"MLReductionLib\" not in MLOptions:\n MLoptions[\"MLReductionLib\"] = []\n if cuttools:\n MLoptions[\"MLReductionLib\"].extend([1])\n if \"iregi_dir\" in tir:\n MLoptions[\"MLReductionLib\"].extend([3])\n if \"pjfry_dir\" in tir:\n MLoptions[\"MLReductionLib\"].extend([2])\n if \"golem_dir\" in tir:\n MLoptions[\"MLReductionLib\"].extend([4])\n if \"samurai_dir\" in tir:\n MLoptions[\"MLReductionLib\"].extend([5])\n if \"ninja_dir\" in tir:\n MLoptions[\"MLReductionLib\"].extend([6])\n if \"collier_dir\" in tir:\n MLoptions[\"MLReductionLib\"].extend([7])\n\n 
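# Sketch (added, hypothetical paths): the numeric IDs collected above follow\n # the mapping used throughout this file, namely 1=CutTools, 2=PJFry++,\n # 3=IREGI, 4=Golem95, 5=Samurai, 6=Ninja and 7=COLLIER. For instance, to\n # check Ninja with a CutTools fallback one could call:\n # check_stability(my_procdef, cuttools=my_cuttools_dir,\n # tir={'ninja_dir': my_ninja_dir},\n # options={'reuse': False, 'npoints': 100, 'split_orders': -1},\n # MLOptions={'MLReductionLib': [6, 1]})\n 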
stability = myStabilityChecker.check_matrix_element_stability(matrix_element, \n options=options,param_card=param_card, \n keep_folder=keep_folder,\n MLOptions=MLoptions)\n \n if stability == None:\n return None\n else:\n stability['loop_optimized_output']=myStabilityChecker.loop_optimized_output\n return stability\n\n#===============================================================================\n# check_timing for loop processes\n#===============================================================================\ndef check_timing(process_definition, param_card= None, cuttools=\"\",tir={},\n output_path=None, options={}, cmd = FakeInterface(),\n MLOptions = {}): \n \"\"\"For a single loop process, give a detailed summary of the generation and\n execution timing.\"\"\"\n\n if 'reuse' not in options:\n keep_folder = False\n else:\n keep_folder = options['reuse']\n model=process_definition.get('model')\n timing1, matrix_element = generate_loop_matrix_element(process_definition,\n keep_folder, output_path=output_path, cmd=cmd)\n reusing = isinstance(matrix_element, base_objects.Process)\n options['reuse'] = reusing\n myTimer = LoopMatrixElementTimer(cuttools_dir=cuttools,model=model,tir_dir=tir,\n output_path=output_path, cmd=cmd)\n\n if not myTimer.loop_optimized_output:\n MLoptions = {}\n else:\n MLoptions = MLOptions\n # Make sure that the poles computation is disabled for COLLIER\n if 'COLLIERComputeUVpoles' not in MLoptions:\n MLoptions['COLLIERComputeUVpoles']=False\n if 'COLLIERComputeIRpoles' not in MLoptions:\n MLoptions['COLLIERComputeIRpoles']=False\n # And the COLLIER global cache is active, if not specified\n if 'COLLIERGlobalCache' not in MLoptions:\n MLoptions['COLLIERGlobalCache']=-1\n # And time NINJA by default if not specified:\n if 'MLReductionLib' not in MLoptions or \\\n len(MLoptions['MLReductionLib'])==0:\n MLoptions['MLReductionLib'] = [6]\n \n timing2 = myTimer.time_matrix_element(matrix_element, reusing, param_card,\n keep_folder = keep_folder, options=options,\n MLOptions = MLoptions)\n \n if timing2 == None:\n return None\n else: \n # Return the merged two dictionaries\n res = dict(list(timing1.items())+list(timing2.items()))\n res['loop_optimized_output']=myTimer.loop_optimized_output\n res['reduction_tool'] = MLoptions['MLReductionLib'][0]\n return res\n\n#===============================================================================\n# check_processes\n#===============================================================================\ndef check_processes(processes, param_card = None, quick = [],cuttools=\"\",tir={},\n options=None, reuse = False, output_path=None, cmd = FakeInterface()):\n \"\"\"Check processes by generating them with all possible orderings\n of particles (which means different diagram building and Helas\n calls), and comparing the resulting matrix element values.\"\"\"\n\n cmass_scheme = cmd.options['complex_mass_scheme']\n if isinstance(processes, base_objects.ProcessDefinition):\n # Generate a list of unique processes\n # Extract IS and FS ids\n multiprocess = processes\n model = multiprocess.get('model')\n\n # Initialize matrix element evaluation\n if multiprocess.get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model,\n auth_skipping = True, reuse = False, cmd = cmd)\n else:\n evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir, \n model=model, auth_skipping = True,\n reuse = False, output_path=output_path, cmd = cmd)\n \n results = run_multiprocs_no_crossings(check_process,\n multiprocess,\n 
evaluator,\n quick,\n options)\n\n if \"used_lorentz\" not in evaluator.stored_quantities:\n evaluator.stored_quantities[\"used_lorentz\"] = []\n \n if multiprocess.get('perturbation_couplings')!=[] and not reuse:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path)\n \n return results, evaluator.stored_quantities[\"used_lorentz\"]\n\n elif isinstance(processes, base_objects.Process):\n processes = base_objects.ProcessList([processes])\n elif isinstance(processes, base_objects.ProcessList):\n pass\n else:\n raise InvalidCmd(\"processes is of an unsupported format\")\n\n if not processes:\n raise InvalidCmd(\"No processes given\")\n\n model = processes[0].get('model')\n\n # Initialize matrix element evaluation\n if processes[0].get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,\n auth_skipping = True, reuse = False, cmd = cmd)\n else:\n evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools, tir_dir=tir,\n model=model,param_card=param_card,\n auth_skipping = True, reuse = False,\n output_path=output_path, cmd = cmd)\n\n # Keep track of tested processes, matrix elements, color and already\n # initiated Lorentz routines, to reuse as much as possible\n sorted_ids = []\n comparison_results = []\n\n # Check process by process\n for process in processes:\n \n # Check if we have already checked this process \n if check_already_checked([l.get('id') for l in process.get('legs') if \\\n not l.get('state')],\n [l.get('id') for l in process.get('legs') if \\\n l.get('state')],\n sorted_ids, process, model):\n continue\n # Get process result\n res = check_process(process, evaluator, quick, options)\n if res:\n comparison_results.append(res)\n\n if \"used_lorentz\" not in evaluator.stored_quantities:\n evaluator.stored_quantities[\"used_lorentz\"] = []\n \n if processes[0].get('perturbation_couplings')!=[] and not reuse:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path) \n \n return comparison_results, evaluator.stored_quantities[\"used_lorentz\"]\n\ndef check_process(process, evaluator, quick, options):\n \"\"\"Check the helas calls for a process by generating the process\n using all different permutations of the process legs (or, if\n quick, use a subset of permutations), and check that the matrix\n element is invariant under this.\"\"\"\n\n model = process.get('model')\n\n # Ensure that leg numbers are set\n for i, leg in enumerate(process.get('legs')):\n leg.set('number', i+1)\n\n logger.info(\"Checking crossings of %s\" % \\\n process.nice_string().replace('Process:', 'process'))\n\n process_matrix_elements = []\n\n # For quick checks, only test two permutations with leg \"1\" in\n # each position\n if quick:\n leg_positions = [[] for leg in process.get('legs')]\n quick = list(range(1,len(process.get('legs')) + 1))\n\n values = []\n\n # Now, generate all possible permutations of the legs\n number_checked=0\n for legs in itertools.permutations(process.get('legs')):\n \n order = [l.get('number') for l in legs]\n if quick:\n found_leg = True\n for num in quick:\n # Only test one permutation for each position of the\n # specified legs\n leg_position = legs.index([l for l in legs if \\\n l.get('number') == num][0])\n\n if leg_position not in leg_positions[num-1]:\n found_leg = False\n leg_positions[num-1].append(leg_position)\n\n if found_leg:\n continue\n \n # Further limit the total number of permutations checked to 3 for\n # loop processes.\n if quick and 
process.get('perturbation_couplings') and number_checked >3:\n continue\n\n legs = base_objects.LegList(legs)\n\n if order != list(range(1,len(legs) + 1)):\n logger.info(\"Testing permutation: %s\" % \\\n order)\n \n newproc = copy.copy(process)\n newproc.set('legs',legs)\n\n # Generate the amplitude for this process\n try:\n if newproc.get('perturbation_couplings')==[]:\n amplitude = diagram_generation.Amplitude(newproc)\n else:\n # Change the cutting method every two times.\n loop_base_objects.cutting_method = 'optimal' if \\\n number_checked%2 == 0 else 'default'\n amplitude = loop_diagram_generation.LoopAmplitude(newproc)\n except InvalidCmd:\n result=False\n else:\n result = amplitude.get('diagrams')\n # Make sure to re-initialize the cutting method to the original one.\n loop_base_objects.cutting_method = 'optimal'\n \n if not result:\n # This process has no diagrams; go to next process\n logging.info(\"No diagrams for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n break\n\n if order == list(range(1,len(legs) + 1)):\n # Generate phase space point to use\n p, w_rambo = evaluator.get_momenta(process, options)\n\n # Generate the HelasMatrixElement for the process\n if not isinstance(amplitude,loop_diagram_generation.LoopAmplitude):\n matrix_element = helas_objects.HelasMatrixElement(amplitude,\n gen_color=False)\n else:\n matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,\n optimized_output=evaluator.loop_optimized_output)\n\n # The loop diagrams are always the same in the basis, so that the\n # LoopHelasMatrixElement always look alike. One needs to consider\n # the crossing no matter what then.\n if amplitude.get('process').get('has_born'):\n # But the born diagrams will change depending on the order of the\n # particles in the process definition\n if matrix_element in process_matrix_elements:\n # Exactly the same matrix element has been tested\n # for other permutation of same process\n continue\n\n process_matrix_elements.append(matrix_element)\n\n res = evaluator.evaluate_matrix_element(matrix_element, p = p, \n options=options)\n if res == None:\n break\n\n values.append(res[0])\n number_checked += 1\n\n # Check if we failed badly (1% is already bad) - in that\n # case done for this process\n if abs(max(values)) + abs(min(values)) > 0 and \\\n 2 * abs(max(values) - min(values)) / \\\n (abs(max(values)) + abs(min(values))) > 0.01:\n break\n \n # Check if process was interrupted\n if not values:\n return None\n\n # Done with this process. Collect values, and store\n # process and momenta\n diff = 0\n if abs(max(values)) + abs(min(values)) > 0:\n diff = 2* abs(max(values) - min(values)) / \\\n (abs(max(values)) + abs(min(values)))\n\n # be more tolerant with loop processes\n if process.get('perturbation_couplings'):\n passed = diff < 1.e-5\n else:\n passed = diff < 1.e-8 \n\n return {\"process\": process,\n \"momenta\": p,\n \"values\": values,\n \"difference\": diff,\n \"passed\": passed}\n\ndef clean_up(mg_root):\n \"\"\"Clean-up the possible left-over outputs from 'evaluate_matrix element' of\n the LoopMatrixEvaluator (when its argument proliferate is set to true). 
\"\"\"\n \n if mg_root is None:\n pass\n \n directories = misc.glob('%s*' % temp_dir_prefix, mg_root)\n if directories != []:\n logger.debug(\"Cleaning temporary %s* check runs.\"%temp_dir_prefix)\n for dir in directories:\n # For safety make sure that the directory contains a folder SubProcesses\n if os.path.isdir(pjoin(dir,'SubProcesses')):\n shutil.rmtree(dir)\n\ndef format_output(output,format):\n \"\"\" Return a string for 'output' with the specified format. If output is \n None, it returns 'NA'.\"\"\"\n \n if output!=None:\n return format%output\n else:\n return 'NA'\n\ndef output_profile(myprocdef, stability, timing, output_path, reusing=False):\n \"\"\"Present the results from a timing and stability consecutive check\"\"\"\n \n\n opt = timing['loop_optimized_output']\n\n text = 'Timing result for the '+('optimized' if opt else 'default')+\\\n ' output:\\n'\n text += output_timings(myprocdef,timing)\n\n text += '\\nStability result for the '+('optimized' if opt else 'default')+\\\n ' output:\\n'\n text += output_stability(stability,output_path, reusing=reusing)\n\n mode = 'optimized' if opt else 'default'\n logFilePath = pjoin(output_path, 'profile_%s_%s.log'\\\n %(mode,stability['Process'].shell_string())) \n logFile = open(logFilePath, 'w')\n logFile.write(text)\n logFile.close()\n logger.info('Log of this profile check was output to file %s'\\\n %str(logFilePath))\n return text\n\ndef output_stability(stability, output_path, reusing=False):\n \"\"\"Present the result of a stability check in a nice format.\n The full info is printed out in 'Stability_result_<proc_shell_string>.dat'\n under the MadGraph5_aMC@NLO root folder (output_path)\"\"\"\n \n def accuracy(eval_list):\n \"\"\" Compute the accuracy from different evaluations.\"\"\"\n return (2.0*(max(eval_list)-min(eval_list))/\n abs(max(eval_list)+min(eval_list)))\n \n def best_estimate(eval_list):\n \"\"\" Returns the best estimate from different evaluations.\"\"\"\n return (max(eval_list)+min(eval_list))/2.0\n \n def loop_direction_test_power(eval_list):\n \"\"\" Computes the loop direction test power P is computed as follow:\n P = accuracy(loop_dir_test) / accuracy(all_test)\n So that P is large if the loop direction test is effective.\n The tuple returned is (log(median(P)),log(min(P)),frac)\n where frac is the fraction of events with powers smaller than -3\n which means events for which the reading direction test shows an\n accuracy three digits higher than it really is according to the other\n tests.\"\"\"\n powers=[]\n for eval in eval_list:\n loop_dir_evals = [eval['CTModeA'],eval['CTModeB']]\n # CTModeA is the reference so we keep it in too\n other_evals = [eval[key] for key in eval.keys() if key not in \\\n ['CTModeB','Accuracy']]\n if accuracy(other_evals)!=0.0 and accuracy(loop_dir_evals)!=0.0:\n powers.append(accuracy(loop_dir_evals)/accuracy(other_evals))\n \n n_fail=0\n for p in powers:\n if (math.log(p)/math.log(10))<-3:\n n_fail+=1\n \n if len(powers)==0:\n return (None,None,None)\n\n return (math.log(median(powers))/math.log(10),\n math.log(min(powers))/math.log(10),\n n_fail/len(powers))\n \n def test_consistency(dp_eval_list, qp_eval_list):\n \"\"\" Computes the consistency test C from the DP and QP evaluations.\n C = accuracy(all_DP_test) / abs(best_QP_eval-best_DP_eval)\n So a consistent test would have C as close to one as possible.\n The tuple returned is (log(median(C)),log(min(C)),log(max(C)))\"\"\"\n consistencies = []\n for dp_eval, qp_eval in zip(dp_eval_list,qp_eval_list):\n dp_evals = 
[dp_eval[key] for key in dp_eval.keys() \\\n if key!='Accuracy']\n qp_evals = [qp_eval[key] for key in qp_eval.keys() \\\n if key!='Accuracy']\n if (abs(best_estimate(qp_evals)-best_estimate(dp_evals)))!=0.0 and \\\n accuracy(dp_evals)!=0.0:\n consistencies.append(accuracy(dp_evals)/(abs(\\\n best_estimate(qp_evals)-best_estimate(dp_evals))))\n\n if len(consistencies)==0:\n return (None,None,None)\n\n return (math.log(median(consistencies))/math.log(10),\n math.log(min(consistencies))/math.log(10),\n math.log(max(consistencies))/math.log(10))\n \n def median(orig_list):\n \"\"\" Find the median of a sorted float list. \"\"\"\n tmp=copy.copy(orig_list)\n tmp.sort()\n if len(tmp)%2==0:\n return (tmp[int((len(tmp)/2)-1)]+tmp[int(len(tmp)/2)])/2.0\n else:\n return tmp[int((len(tmp)-1)/2)]\n\n # Define shortcut\n f = format_output \n opt = stability['loop_optimized_output']\n\n mode = 'optimized' if opt else 'default'\n process = stability['Process']\n res_str = \"Stability checking for %s (%s mode)\\n\"\\\n %(process.nice_string()[9:],mode)\n\n logFile = open(pjoin(output_path, 'stability_%s_%s.log'\\\n %(mode,process.shell_string())), 'w')\n\n logFile.write('Stability check results\\n\\n')\n logFile.write(res_str)\n data_plot_dict={}\n accuracy_dict={}\n nPSmax=0\n max_acc=0.0\n min_acc=1.0\n if stability['Stability']:\n toolnames= list(stability['Stability'].keys())\n toolnamestr=\" | \".join(tn+\n ''.join([' ']*(10-len(tn))) for tn in toolnames)\n DP_stability = [[eval['Accuracy'] for eval in stab['DP_stability']] \\\n for key,stab in stability['Stability'].items()]\n med_dp_stab_str=\" | \".join([f(median(dp_stab),'%.2e ') for dp_stab in DP_stability])\n min_dp_stab_str=\" | \".join([f(min(dp_stab),'%.2e ') for dp_stab in DP_stability])\n max_dp_stab_str=\" | \".join([f(max(dp_stab),'%.2e ') for dp_stab in DP_stability])\n UPS = [stab['Unstable_PS_points'] for key,stab in stability['Stability'].items()]\n res_str_i = \"\\n= Tool (DoublePrec for CT)....... %s\\n\"%toolnamestr\n len_PS=[\"%i\"%len(evals)+\\\n ''.join([' ']*(10-len(\"%i\"%len(evals)))) for evals in DP_stability]\n len_PS_str=\" | \".join(len_PS)\n res_str_i += \"|= Number of PS points considered %s\\n\"%len_PS_str \n res_str_i += \"|= Median accuracy............... %s\\n\"%med_dp_stab_str\n res_str_i += \"|= Max accuracy.................. %s\\n\"%min_dp_stab_str\n res_str_i += \"|= Min accuracy.................. %s\\n\"%max_dp_stab_str\n pmedminlist=[]\n pfraclist=[]\n for key,stab in stability['Stability'].items():\n (pmed,pmin,pfrac)=loop_direction_test_power(stab['DP_stability'])\n ldtest_str = \"%s,%s\"%(f(pmed,'%.1f'),f(pmin,'%.1f'))\n pfrac_str = f(pfrac,'%.2e')\n pmedminlist.append(ldtest_str+''.join([' ']*(10-len(ldtest_str))))\n pfraclist.append(pfrac_str+''.join([' ']*(10-len(pfrac_str))))\n pmedminlist_str=\" | \".join(pmedminlist)\n pfraclist_str=\" | \".join(pfraclist)\n res_str_i += \"|= Overall DP loop_dir test power %s\\n\"%pmedminlist_str\n res_str_i += \"|= Fraction of evts with power<-3 %s\\n\"%pfraclist_str\n len_UPS=[\"%i\"%len(upup)+\\\n ''.join([' ']*(10-len(\"%i\"%len(upup)))) for upup in UPS]\n len_UPS_str=\" | \".join(len_UPS)\n res_str_i += \"|= Number of Unstable PS points %s\\n\"%len_UPS_str\n res_str_i += \\\n \"\"\"\n= Legend for the statistics of the stability tests. 
(all log below ar log_10)\nThe loop direction test power P is computed as follow:\n P = accuracy(loop_dir_test) / accuracy(all_other_test)\n So that log(P) is positive if the loop direction test is effective.\n The tuple printed out is (log(median(P)),log(min(P)))\n The consistency test C is computed when QP evaluations are available:\n C = accuracy(all_DP_test) / abs(best_QP_eval-best_DP_eval)\n So a consistent test would have log(C) as close to zero as possible.\n The tuple printed out is (log(median(C)),log(min(C)),log(max(C)))\\n\"\"\"\n res_str+=res_str_i\n for key in stability['Stability'].keys():\n toolname=key\n stab=stability['Stability'][key]\n DP_stability = [eval['Accuracy'] for eval in stab['DP_stability']]\n # Remember that an evaluation which did not require QP has an empty dictionary\n QP_stability = [eval['Accuracy'] if eval!={} else -1.0 for eval in \\\n stab['QP_stability']]\n nPS = len(DP_stability)\n if nPS>nPSmax:nPSmax=nPS\n UPS = stab['Unstable_PS_points']\n UPS_stability_DP = [DP_stability[U[0]] for U in UPS]\n UPS_stability_QP = [QP_stability[U[0]] for U in UPS]\n EPS = stab['Exceptional_PS_points']\n EPS_stability_DP = [DP_stability[E[0]] for E in EPS]\n EPS_stability_QP = [QP_stability[E[0]] for E in EPS]\n res_str_i = \"\"\n # Use nicer name for the XML tag in the log file\n xml_toolname = {'GOLEM95':'GOLEM','IREGI':'IREGI',\n 'CUTTOOLS':'CUTTOOLS','PJFRY++':'PJFRY',\n 'NINJA':'NINJA','SAMURAI':'SAMURAI',\n 'COLLIER':'COLLIER'}[toolname.upper()]\n if len(UPS)>0:\n res_str_i = \"\\nDetails of the %d/%d UPS encountered by %s\\n\"\\\n %(len(UPS),nPS,toolname)\n prefix = 'DP' if toolname=='CutTools' else '' \n res_str_i += \"|= %s Median inaccuracy.......... %s\\n\"\\\n %(prefix,f(median(UPS_stability_DP),'%.2e'))\n res_str_i += \"|= %s Max accuracy............... %s\\n\"\\\n %(prefix,f(min(UPS_stability_DP),'%.2e'))\n res_str_i += \"|= %s Min accuracy............... %s\\n\"\\\n %(prefix,f(max(UPS_stability_DP),'%.2e'))\n (pmed,pmin,pfrac)=loop_direction_test_power(\\\n [stab['DP_stability'][U[0]] for U in UPS])\n if toolname=='CutTools':\n res_str_i += \"|= UPS DP loop_dir test power.... %s,%s\\n\"\\\n %(f(pmed,'%.1f'),f(pmin,'%.1f'))\n res_str_i += \"|= UPS DP fraction with power<-3. %s\\n\"\\\n %f(pfrac,'%.2e')\n res_str_i += \"|= QP Median accuracy............ %s\\n\"\\\n %f(median(UPS_stability_QP),'%.2e')\n res_str_i += \"|= QP Max accuracy............... %s\\n\"\\\n %f(min(UPS_stability_QP),'%.2e')\n res_str_i += \"|= QP Min accuracy............... %s\\n\"\\\n %f(max(UPS_stability_QP),'%.2e')\n (pmed,pmin,pfrac)=loop_direction_test_power(\\\n [stab['QP_stability'][U[0]] for U in UPS])\n res_str_i += \"|= UPS QP loop_dir test power.... %s,%s\\n\"\\\n %(f(pmed,'%.1f'),f(pmin,'%.1f'))\n res_str_i += \"|= UPS QP fraction with power<-3. %s\\n\"%f(pfrac,'%.2e')\n (pmed,pmin,pmax)=test_consistency(\\\n [stab['DP_stability'][U[0]] for U in UPS],\n [stab['QP_stability'][U[0]] for U in UPS])\n res_str_i += \"|= DP vs QP stab test consistency %s,%s,%s\\n\"\\\n %(f(pmed,'%.1f'),f(pmin,'%.1f'),f(pmax,'%.1f'))\n if len(EPS)==0: \n res_str_i += \"= Number of Exceptional PS points : 0\\n\"\n if len(EPS)>0:\n res_str_i = \"\\nDetails of the %d/%d EPS encountered by %s\\n\"\\\n %(len(EPS),nPS,toolname)\n res_str_i += \"|= DP Median accuracy............ %s\\n\"\\\n %f(median(EPS_stability_DP),'%.2e')\n res_str_i += \"|= DP Max accuracy............... %s\\n\"\\\n %f(min(EPS_stability_DP),'%.2e')\n res_str_i += \"|= DP Min accuracy............... 
%s\\n\"\\\n %f(max(EPS_stability_DP),'%.2e')\n pmed,pmin,pfrac=loop_direction_test_power(\\\n [stab['DP_stability'][E[0]] for E in EPS])\n res_str_i += \"|= EPS DP loop_dir test power.... %s,%s\\n\"\\\n %(f(pmed,'%.1f'),f(pmin,'%.1f'))\n res_str_i += \"|= EPS DP fraction with power<-3. %s\\n\"\\\n %f(pfrac,'%.2e')\n res_str_i += \"|= QP Median accuracy............ %s\\n\"\\\n %f(median(EPS_stability_QP),'%.2e')\n res_str_i += \"|= QP Max accuracy............... %s\\n\"\\\n %f(min(EPS_stability_QP),'%.2e')\n res_str_i += \"|= QP Min accuracy............... %s\\n\"\\\n %f(max(EPS_stability_QP),'%.2e')\n pmed,pmin,pfrac=loop_direction_test_power(\\\n [stab['QP_stability'][E[0]] for E in EPS])\n res_str_i += \"|= EPS QP loop_dir test power.... %s,%s\\n\"\\\n %(f(pmed,'%.1f'),f(pmin,'%.1f'))\n res_str_i += \"|= EPS QP fraction with power<-3. %s\\n\"%f(pfrac,'%.2e')\n\n logFile.write(res_str_i)\n\n if len(EPS)>0:\n logFile.write('\\nFull details of the %i EPS encountered by %s.\\n'\\\n %(len(EPS),toolname))\n logFile.write('<EPS_data reduction=%s>\\n'%xml_toolname.upper())\n for i, eps in enumerate(EPS):\n logFile.write('\\nEPS #%i\\n'%(i+1))\n logFile.write('\\n'.join([' '+' '.join(['%.16E'%pi for pi in p]) \\\n for p in eps[1]]))\n logFile.write('\\n DP accuracy : %.4e\\n'%DP_stability[eps[0]])\n logFile.write(' QP accuracy : %.4e\\n'%QP_stability[eps[0]])\n logFile.write('</EPS_data>\\n')\n if len(UPS)>0:\n logFile.write('\\nFull details of the %i UPS encountered by %s.\\n'\\\n %(len(UPS),toolname))\n logFile.write('<UPS_data reduction=%s>\\n'%xml_toolname.upper())\n for i, ups in enumerate(UPS):\n logFile.write('\\nUPS #%i\\n'%(i+1))\n logFile.write('\\n'.join([' '+' '.join(['%.16E'%pi for pi in p]) \\\n for p in ups[1]]))\n logFile.write('\\n DP accuracy : %.4e\\n'%DP_stability[ups[0]])\n logFile.write(' QP accuracy : %.4e\\n'%QP_stability[ups[0]])\n logFile.write('</UPS_data>\\n')\n\n logFile.write('\\nData entries for the stability plot.\\n')\n logFile.write('First row is a maximal accuracy delta, second is the '+\\\n 'fraction of events with DP accuracy worse than delta.\\n')\n logFile.write('<plot_data reduction=%s>\\n'%xml_toolname.upper())\n # Set the x-range so that it spans [10**-17,10**(min_digit_accuracy)]\n if max(DP_stability)>0.0:\n min_digit_acc=int(math.log(max(DP_stability))/math.log(10))\n if min_digit_acc>=0:\n min_digit_acc = min_digit_acc+1\n accuracies=[10**(-17+(i/5.0)) for i in range(5*(17+min_digit_acc)+1)]\n else:\n logFile.writelines('%.4e %.4e\\n'%(accuracies[i], 0.0) for i in \\\n range(len(accuracies))) \n logFile.write('</plot_data>\\n')\n res_str_i += '\\nPerfect accuracy over all the trial PS points. 
No plot'+\\\n ' is output then.'\n logFile.write('Perfect accuracy over all the trial PS points.')\n res_str +=res_str_i\n continue\n\n accuracy_dict[toolname]=accuracies\n if max(accuracies) > max_acc: max_acc=max(accuracies)\n if min(accuracies) < min_acc: min_acc=min(accuracies)\n data_plot=[]\n for acc in accuracies:\n data_plot.append(float(len([d for d in DP_stability if d>acc]))\\\n /float(len(DP_stability)))\n data_plot_dict[toolname]=data_plot\n \n logFile.writelines('%.4e %.4e\\n'%(accuracies[i], data_plot[i]) for i in \\\n range(len(accuracies)))\n logFile.write('</plot_data>\\n')\n logFile.write('\\nList of accuracies recorded for the %i evaluations with %s\\n'\\\n %(nPS,toolname))\n logFile.write('First row is DP, second is QP (if available).\\n\\n')\n logFile.write('<accuracies reduction=%s>\\n'%xml_toolname.upper())\n logFile.writelines('%.4e '%DP_stability[i]+('NA\\n' if QP_stability[i]==-1.0 \\\n else '%.4e\\n'%QP_stability[i]) for i in range(nPS))\n logFile.write('</accuracies>\\n')\n res_str+=res_str_i\n logFile.close()\n res_str += \"\\n= Stability details of the run are output to the file\"+\\\n \" stability_%s_%s.log\\n\"%(mode,process.shell_string())\n \n # Bypass the plotting if the madgraph logger has a FileHandler (like it is\n # done in the check command acceptance test) because in this case it makes\n # no sense to plot anything.\n if any(isinstance(handler,logging.FileHandler) for handler in \\\n logging.getLogger('madgraph').handlers):\n return res_str\n\n try:\n import matplotlib.pyplot as plt\n colorlist=['b','r','g','y','m','c','k']\n for i,key in enumerate(data_plot_dict.keys()):\n color=colorlist[i]\n data_plot=data_plot_dict[key]\n accuracies=accuracy_dict[key]\n plt.plot(accuracies, data_plot, color=color, marker='', linestyle='-',\\\n label=key)\n plt.axis([min_acc,max_acc,\\\n 10**(-int(math.log(nPSmax-0.5)/math.log(10))-1), 1])\n plt.yscale('log')\n plt.xscale('log')\n plt.title('Stability plot for %s (%s mode, %d points)'%\\\n (process.nice_string()[9:],mode,nPSmax))\n plt.ylabel('Fraction of events')\n plt.xlabel('Maximal precision')\n plt.legend()\n if not reusing:\n logger.info('Some stability statistics will be displayed once you '+\\\n 'close the plot window')\n plt.show()\n else:\n fig_output_file = str(pjoin(output_path, \n 'stability_plot_%s_%s.png'%(mode,process.shell_string())))\n logger.info('Stability plot output to file %s. '%fig_output_file)\n plt.savefig(fig_output_file)\n return res_str\n except Exception as e:\n if isinstance(e, ImportError):\n res_str += \"\\n= Install matplotlib to get a \"+\\\n \"graphical display of the results of this check.\"\n else:\n res_str += \"\\n= Could not produce the stability plot because of \"+\\\n \"the following error: %s\"%str(e)\n return res_str\n \ndef output_timings(process, timings):\n \"\"\"Present the result of a timings check in a nice format \"\"\"\n \n # Define shortcut\n f = format_output\n loop_optimized_output = timings['loop_optimized_output']\n reduction_tool = bannermod.MadLoopParam._ID_reduction_tool_map[\n timings['reduction_tool']]\n \n res_str = \"%s \\n\"%process.nice_string()\n try:\n gen_total = timings['HELAS_MODEL_compilation']+\\\n timings['HelasDiagrams_generation']+\\\n timings['Process_output']+\\\n timings['Diagrams_generation']+\\\n timings['Process_compilation']+\\\n timings['Initialization']\n except TypeError:\n gen_total = None\n res_str += \"\\n= Generation time total...... ========== %s\\n\"%f(gen_total,'%.3gs')\n res_str += \"|= Diagrams generation....... 
%s\\n\"\\\n %f(timings['Diagrams_generation'],'%.3gs')\n res_str += \"|= Helas Diagrams generation. %s\\n\"\\\n %f(timings['HelasDiagrams_generation'],'%.3gs')\n res_str += \"|= Process output............ %s\\n\"\\\n %f(timings['Process_output'],'%.3gs')\n res_str += \"|= HELAS+model compilation... %s\\n\"\\\n %f(timings['HELAS_MODEL_compilation'],'%.3gs')\n res_str += \"|= Process compilation....... %s\\n\"\\\n %f(timings['Process_compilation'],'%.3gs')\n res_str += \"|= Initialization............ %s\\n\"\\\n %f(timings['Initialization'],'%.3gs')\n\n res_str += \"\\n= Reduction tool tested...... %s\\n\"%reduction_tool\n res_str += \"\\n= Helicity sum time / PSpoint ========== %.3gms\\n\"\\\n %(timings['run_unpolarized_total']*1000.0)\n if loop_optimized_output:\n coef_time=timings['run_unpolarized_coefs']*1000.0\n loop_time=(timings['run_unpolarized_total']-\\\n timings['run_unpolarized_coefs'])*1000.0\n total=coef_time+loop_time\n res_str += \"|= Coefs. computation time... %.3gms (%d%%)\\n\"\\\n %(coef_time,int(round(100.0*coef_time/total)))\n res_str += \"|= Loop evaluation time...... %.3gms (%d%%)\\n\"\\\n %(loop_time,int(round(100.0*loop_time/total)))\n res_str += \"\\n= One helicity time / PSpoint ========== %.3gms\\n\"\\\n %(timings['run_polarized_total']*1000.0)\n if loop_optimized_output:\n coef_time=timings['run_polarized_coefs']*1000.0\n loop_time=(timings['run_polarized_total']-\\\n timings['run_polarized_coefs'])*1000.0\n total=coef_time+loop_time \n res_str += \"|= Coefs. computation time... %.3gms (%d%%)\\n\"\\\n %(coef_time,int(round(100.0*coef_time/total)))\n res_str += \"|= Loop evaluation time...... %.3gms (%d%%)\\n\"\\\n %(loop_time,int(round(100.0*loop_time/total)))\n res_str += \"\\n= Miscellaneous ========================\\n\"\n res_str += \"|= Number of hel. computed... %s/%s\\n\"\\\n %(f(timings['n_contrib_hel'],'%d'),f(timings['n_tot_hel'],'%d'))\n res_str += \"|= Number of loop diagrams... %s\\n\"%f(timings['n_loops'],'%d')\n if loop_optimized_output:\n res_str += \"|= Number of loop groups..... %s\\n\"\\\n %f(timings['n_loop_groups'],'%d')\n res_str += \"|= Number of loop wfs........ %s\\n\"\\\n %f(timings['n_loop_wfs'],'%d')\n if timings['loop_wfs_ranks']!=None:\n for i, r in enumerate(timings['loop_wfs_ranks']):\n res_str += \"||= # of loop wfs of rank %d.. %d\\n\"%(i,r)\n res_str += \"|= Loading time (Color data). ~%.3gms\\n\"\\\n %(timings['Booting_time']*1000.0)\n res_str += \"|= Maximum RAM usage (rss)... %s\\n\"\\\n %f(float(timings['ram_usage']/1000.0),'%.3gMb') \n res_str += \"\\n= Output disk size =====================\\n\"\n res_str += \"|= Source directory sources.. %s\\n\"%f(timings['du_source'],'%sb')\n res_str += \"|= Process sources........... %s\\n\"%f(timings['du_process'],'%sb') \n res_str += \"|= Color and helicity data... %s\\n\"%f(timings['du_color'],'%sb')\n res_str += \"|= Executable size........... 
%s\\n\"%f(timings['du_exe'],'%sb')\n \n return res_str\n\ndef output_comparisons(comparison_results):\n \"\"\"Present the results of a comparison in a nice list format\n mode short: return the number of fail process\n \"\"\" \n proc_col_size = 17\n pert_coupl = comparison_results[0]['process']['perturbation_couplings']\n if pert_coupl:\n process_header = \"Process [virt=\"+\" \".join(pert_coupl)+\"]\"\n else:\n process_header = \"Process\"\n\n if len(process_header) + 1 > proc_col_size:\n proc_col_size = len(process_header) + 1\n\n for proc in comparison_results:\n if len(proc['process'].base_string()) + 1 > proc_col_size:\n proc_col_size = len(proc['process'].base_string()) + 1\n\n col_size = 18\n\n pass_proc = 0\n fail_proc = 0\n no_check_proc = 0\n\n failed_proc_list = []\n no_check_proc_list = []\n\n res_str = fixed_string_length(process_header, proc_col_size) + \\\n fixed_string_length(\"Min element\", col_size) + \\\n fixed_string_length(\"Max element\", col_size) + \\\n fixed_string_length(\"Relative diff.\", col_size) + \\\n \"Result\"\n\n for result in comparison_results:\n proc = result['process'].base_string()\n values = result['values']\n \n if len(values) <= 1:\n res_str += '\\n' + fixed_string_length(proc, proc_col_size) + \\\n \" * No permutations, process not checked *\" \n no_check_proc += 1\n no_check_proc_list.append(result['process'].nice_string())\n continue\n\n passed = result['passed']\n\n res_str += '\\n' + fixed_string_length(proc, proc_col_size) + \\\n fixed_string_length(\"%1.10e\" % min(values), col_size) + \\\n fixed_string_length(\"%1.10e\" % max(values), col_size) + \\\n fixed_string_length(\"%1.10e\" % result['difference'],\n col_size)\n if passed:\n pass_proc += 1\n res_str += \"Passed\"\n else:\n fail_proc += 1\n failed_proc_list.append(result['process'].nice_string())\n res_str += \"Failed\"\n\n res_str += \"\\nSummary: %i/%i passed, %i/%i failed\" % \\\n (pass_proc, pass_proc + fail_proc,\n fail_proc, pass_proc + fail_proc)\n\n if fail_proc != 0:\n res_str += \"\\nFailed processes: %s\" % ', '.join(failed_proc_list)\n if no_check_proc != 0:\n res_str += \"\\nNot checked processes: %s\" % ', '.join(no_check_proc_list)\n\n return res_str\n\ndef fixed_string_length(mystr, length):\n \"\"\"Helper function to fix the length of a string by cutting it \n or adding extra space.\"\"\"\n \n if len(mystr) > length:\n return mystr[0:length]\n else:\n return mystr + \" \" * (length - len(mystr))\n \n\n#===============================================================================\n# check_gauge\n#===============================================================================\ndef check_gauge(processes, param_card = None,cuttools=\"\", tir={}, reuse = False, \n options=None, output_path=None, cmd = FakeInterface()):\n \"\"\"Check gauge invariance of the processes by using the BRS check.\n For one of the massless external bosons (e.g. 
gluon or photon), \n replace the polarization vector (epsilon_mu) with its momentum (p_mu)\n \"\"\"\n cmass_scheme = cmd.options['complex_mass_scheme']\n if isinstance(processes, base_objects.ProcessDefinition):\n # Generate a list of unique processes\n # Extract IS and FS ids\n multiprocess = processes\n\n model = multiprocess.get('model') \n # Initialize matrix element evaluation\n if multiprocess.get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,cmd= cmd,\n auth_skipping = True, reuse = False)\n else:\n evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,\n cmd=cmd,model=model, param_card=param_card,\n auth_skipping = False, reuse = False,\n output_path=output_path)\n\n if not cmass_scheme and multiprocess.get('perturbation_couplings')==[]:\n # Set all widths to zero for gauge check\n logger.info('Set All width to zero for non complex mass scheme checks')\n for particle in evaluator.full_model.get('particles'):\n if particle.get('width') != 'ZERO':\n evaluator.full_model.get('parameter_dict')[particle.get('width')] = 0.\n results = run_multiprocs_no_crossings(check_gauge_process,\n multiprocess,\n evaluator,\n options=options\n )\n \n if multiprocess.get('perturbation_couplings')!=[] and not reuse:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path)\n \n return results\n\n elif isinstance(processes, base_objects.Process):\n processes = base_objects.ProcessList([processes])\n elif isinstance(processes, base_objects.ProcessList):\n pass\n else:\n raise InvalidCmd(\"processes is of non-supported format\")\n\n assert processes, \"No processes given\"\n\n model = processes[0].get('model')\n\n # Initialize matrix element evaluation\n if processes[0].get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,\n auth_skipping = True, reuse = False, \n cmd = cmd)\n else:\n evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,\n model=model, param_card=param_card,\n auth_skipping = False, reuse = False,\n output_path=output_path, cmd = cmd)\n comparison_results = []\n comparison_explicit_flip = []\n\n # For each process, make sure we have set up leg numbers:\n for process in processes:\n # Check if we already checked process\n #if check_already_checked([l.get('id') for l in process.get('legs') if \\\n # not l.get('state')],\n ## [l.get('id') for l in process.get('legs') if \\\n # l.get('state')],\n # sorted_ids, process, model):\n # continue\n \n # Get process result\n result = check_gauge_process(process, evaluator,options=options)\n if result:\n comparison_results.append(result)\n\n if processes[0].get('perturbation_couplings')!=[] and not reuse:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path)\n \n return comparison_results\n\n\ndef check_gauge_process(process, evaluator, options=None):\n \"\"\"Check gauge invariance for the process, unless it is already done.\"\"\"\n\n model = process.get('model')\n\n # Check that there are massless vector bosons in the process\n found_gauge = False\n for i, leg in enumerate(process.get('legs')):\n part = model.get_particle(leg.get('id'))\n if part.get('spin') == 3 and part.get('mass').lower() == 'zero':\n found_gauge = True\n break\n if not found_gauge:\n logger.info(\"No ward identity for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n # This process can't be checked\n return None\n\n for i, leg in enumerate(process.get('legs')):\n 
leg.set('number', i+1)\n\n logger.info(\"Checking ward identities for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n\n legs = process.get('legs')\n # Generate a process with these legs\n # Generate the amplitude for this process\n try:\n if process.get('perturbation_couplings')==[]:\n amplitude = diagram_generation.Amplitude(process)\n else:\n amplitude = loop_diagram_generation.LoopAmplitude(process)\n except InvalidCmd:\n logging.info(\"No diagrams for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n return None \n if not amplitude.get('diagrams'):\n # This process has no diagrams; go to next process\n logging.info(\"No diagrams for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n return None\n # Generate the HelasMatrixElement for the process\n if not isinstance(amplitude,loop_diagram_generation.LoopAmplitude):\n matrix_element = helas_objects.HelasMatrixElement(amplitude,\n gen_color = False)\n else:\n matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,\n optimized_output=evaluator.loop_optimized_output)\n\n #p, w_rambo = evaluator.get_momenta(process)\n\n# MLOptions = {'ImprovePS':True,'ForceMP':True}\n\n# brsvalue = evaluator.evaluate_matrix_element(matrix_element, gauge_check = True,\n# output='jamp',MLOptions=MLOptions, options=options)\n\n brsvalue = evaluator.evaluate_matrix_element(matrix_element, gauge_check = True,\n output='jamp', options=options)\n\n if not isinstance(amplitude,loop_diagram_generation.LoopAmplitude):\n matrix_element = helas_objects.HelasMatrixElement(amplitude,\n gen_color = False)\n \n mvalue = evaluator.evaluate_matrix_element(matrix_element, gauge_check = False,\n output='jamp', options=options)\n \n if mvalue and mvalue['m2']:\n return {'process':process,'value':mvalue,'brs':brsvalue}\n\ndef output_gauge(comparison_results, output='text'):\n \"\"\"Present the results of a comparison in a nice list format\"\"\"\n\n proc_col_size = 17\n \n pert_coupl = comparison_results[0]['process']['perturbation_couplings']\n \n # Of course, be more tolerant for loop processes\n if pert_coupl:\n threshold=1e-5\n else:\n threshold=1e-10\n \n if pert_coupl:\n process_header = \"Process [virt=\"+\" \".join(pert_coupl)+\"]\"\n else:\n process_header = \"Process\"\n\n if len(process_header) + 1 > proc_col_size:\n proc_col_size = len(process_header) + 1\n\n for one_comp in comparison_results:\n proc = one_comp['process'].base_string()\n mvalue = one_comp['value']\n brsvalue = one_comp['brs']\n if len(proc) + 1 > proc_col_size:\n proc_col_size = len(proc) + 1\n\n col_size = 18\n\n pass_proc = 0\n fail_proc = 0\n\n failed_proc_list = []\n no_check_proc_list = []\n\n res_str = fixed_string_length(process_header, proc_col_size) + \\\n fixed_string_length(\"matrix\", col_size) + \\\n fixed_string_length(\"BRS\", col_size) + \\\n fixed_string_length(\"ratio\", col_size) + \\\n \"Result\"\n\n for one_comp in comparison_results:\n proc = one_comp['process'].base_string()\n mvalue = one_comp['value']\n brsvalue = one_comp['brs']\n ratio = (abs(brsvalue['m2'])/abs(mvalue['m2']))\n res_str += '\\n' + fixed_string_length(proc, proc_col_size) + \\\n fixed_string_length(\"%1.10e\" % mvalue['m2'], col_size)+ \\\n fixed_string_length(\"%1.10e\" % brsvalue['m2'], col_size)+ \\\n fixed_string_length(\"%1.10e\" % ratio, col_size)\n \n if ratio > threshold:\n fail_proc += 1\n proc_succeed = False\n failed_proc_list.append(proc)\n res_str += \"Failed\"\n else:\n pass_proc += 1\n proc_succeed = True\n res_str += 
\"Passed\"\n\n #check all the JAMP\n # loop over jamp\n # This is not available for loop processes where the jamp list returned\n # is empty.\n if len(mvalue['jamp'])!=0:\n for k in range(len(mvalue['jamp'][0])):\n m_sum = 0\n brs_sum = 0\n # loop over helicity\n for j in range(len(mvalue['jamp'])):\n #values for the different lorentz boost\n m_sum += abs(mvalue['jamp'][j][k])**2\n brs_sum += abs(brsvalue['jamp'][j][k])**2 \n \n # Compare the different helicity \n if not m_sum:\n continue\n ratio = abs(brs_sum) / abs(m_sum)\n \n tmp_str = '\\n' + fixed_string_length(' JAMP %s'%k , proc_col_size) + \\\n fixed_string_length(\"%1.10e\" % m_sum, col_size) + \\\n fixed_string_length(\"%1.10e\" % brs_sum, col_size) + \\\n fixed_string_length(\"%1.10e\" % ratio, col_size) \n \n if ratio > 1e-15:\n if not len(failed_proc_list) or failed_proc_list[-1] != proc:\n fail_proc += 1\n pass_proc -= 1\n failed_proc_list.append(proc)\n res_str += tmp_str + \"Failed\"\n elif not proc_succeed:\n res_str += tmp_str + \"Passed\"\n\n\n res_str += \"\\nSummary: %i/%i passed, %i/%i failed\" % \\\n (pass_proc, pass_proc + fail_proc,\n fail_proc, pass_proc + fail_proc)\n\n if fail_proc != 0:\n res_str += \"\\nFailed processes: %s\" % ', '.join(failed_proc_list)\n\n if output=='text':\n return res_str\n else:\n return fail_proc\n#===============================================================================\n# check_lorentz\n#===============================================================================\ndef check_lorentz(processes, param_card = None,cuttools=\"\", tir={}, options=None, \\\n reuse = False, output_path=None, cmd = FakeInterface()):\n \"\"\" Check if the square matrix element (sum over helicity) is lorentz \n invariant by boosting the momenta with different value.\"\"\"\n\n cmass_scheme = cmd.options['complex_mass_scheme']\n if isinstance(processes, base_objects.ProcessDefinition):\n # Generate a list of unique processes\n # Extract IS and FS ids\n multiprocess = processes\n model = multiprocess.get('model')\n # Initialize matrix element evaluation\n if multiprocess.get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model,\n cmd= cmd, auth_skipping = False, reuse = True)\n else:\n evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,\n model=model, auth_skipping = False, reuse = True,\n output_path=output_path, cmd = cmd)\n\n if not cmass_scheme and processes.get('perturbation_couplings')==[]:\n # Set all widths to zero for lorentz check\n logger.info('Set All width to zero for non complex mass scheme checks')\n for particle in evaluator.full_model.get('particles'):\n if particle.get('width') != 'ZERO':\n evaluator.full_model.get('parameter_dict')[\\\n particle.get('width')] = 0.\n\n results = run_multiprocs_no_crossings(check_lorentz_process,\n multiprocess,\n evaluator,\n options=options)\n\n if multiprocess.get('perturbation_couplings')!=[] and not reuse:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path)\n \n return results\n \n elif isinstance(processes, base_objects.Process):\n processes = base_objects.ProcessList([processes])\n elif isinstance(processes, base_objects.ProcessList):\n pass\n else:\n raise InvalidCmd(\"processes is of non-supported format\")\n\n assert processes, \"No processes given\"\n\n model = processes[0].get('model')\n\n # Initialize matrix element evaluation\n if processes[0].get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,\n auth_skipping = 
False, reuse = True, \n cmd=cmd)\n else:\n evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools, tir_dir=tir,\n model=model,param_card=param_card,\n auth_skipping = False, reuse = True,\n output_path=output_path, cmd = cmd)\n\n comparison_results = []\n\n # For each process, make sure we have set up leg numbers:\n for process in processes:\n # Check if we already checked process\n #if check_already_checked([l.get('id') for l in process.get('legs') if \\\n # not l.get('state')],\n # [l.get('id') for l in process.get('legs') if \\\n # l.get('state')],\n # sorted_ids, process, model):\n # continue\n \n # Get process result\n result = check_lorentz_process(process, evaluator,options=options)\n if result:\n comparison_results.append(result)\n\n if processes[0].get('perturbation_couplings')!=[] and not reuse:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path)\n\n return comparison_results\n\n\ndef check_lorentz_process(process, evaluator,options=None):\n \"\"\"Check Lorentz invariance for the process, unless it is already done.\"\"\"\n\n amp_results = []\n model = process.get('model')\n\n for i, leg in enumerate(process.get('legs')):\n leg.set('number', i+1)\n\n logger.info(\"Checking Lorentz transformations for %s\" % \\\n process.nice_string().replace('Process:', 'process'))\n\n legs = process.get('legs')\n # Generate a process with these legs\n # Generate the amplitude for this process\n try:\n if process.get('perturbation_couplings')==[]:\n amplitude = diagram_generation.Amplitude(process)\n else:\n amplitude = loop_diagram_generation.LoopAmplitude(process)\n except InvalidCmd:\n logging.info(\"No diagrams for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n return None\n \n if not amplitude.get('diagrams'):\n # This process has no diagrams; go to next process\n logging.info(\"No diagrams for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n return None\n\n # Generate a phase-space point for the process\n p, w_rambo = evaluator.get_momenta(process, options)\n\n # Generate the HelasMatrixElement for the process\n if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):\n matrix_element = helas_objects.HelasMatrixElement(amplitude,\n gen_color = True)\n else:\n matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,\n optimized_output = evaluator.loop_optimized_output)\n\n MLOptions = {'ImprovePS':True,'ForceMP':True}\n if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):\n data = evaluator.evaluate_matrix_element(matrix_element, p=p, output='jamp',\n auth_skipping = True, options=options)\n else:\n data = evaluator.evaluate_matrix_element(matrix_element, p=p, output='jamp',\n auth_skipping = True, PS_name = 'original', MLOptions=MLOptions,\n options = options)\n\n if data and data['m2']:\n if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):\n results = [data]\n else:\n results = [('Original evaluation',data)]\n else:\n return {'process':process, 'results':'pass'}\n\n # The boosts are not precise enough for the loop evaluations and one needs the\n # fortran improve_ps function of MadLoop to work. 
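# --- Illustrative aside (not part of the original module) --------------------
# A minimal sketch of the kinematics behind the Lorentz-invariance test
# performed below: boost every external four-momentum [E, px, py, pz] along z
# and verify that the total invariant mass is unchanged (the squared matrix
# element must likewise be invariant). `boost_z` and `inv_mass` are
# hypothetical helpers, not the module's own boost_momenta.

import math

def boost_z(p, beta):
    """Boost a four-momentum [E, px, py, pz] along the z axis."""
    gamma = 1.0 / math.sqrt(1.0 - beta ** 2)
    E, px, py, pz = p
    return [gamma * (E + beta * pz), px, py, gamma * (pz + beta * E)]

def inv_mass(momenta):
    """Invariant mass of the sum of a list of four-momenta."""
    tot = [sum(p[i] for p in momenta) for i in range(4)]
    return math.sqrt(tot[0] ** 2 - tot[1] ** 2 - tot[2] ** 2 - tot[3] ** 2)

if __name__ == '__main__':
    ps = [[7.0, 0.0, 0.0, 5.0], [7.0, 0.0, 0.0, -5.0]]
    boosted = [boost_z(p, 0.3) for p in ps]
    # The invariant mass (and any Lorentz scalar) must survive the boost
    assert abs(inv_mass(ps) - inv_mass(boosted)) < 1e-9
# ------------------------------------------------------------------------------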
So we only consider the\n # boosts along the z directions for loops or simple rotations.\n if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):\n for boost in range(1,4):\n boost_p = boost_momenta(p, boost)\n results.append(evaluator.evaluate_matrix_element(matrix_element,\n p=boost_p,output='jamp'))\n else:\n # We only consider the boost along the z axis so as to have the\n # improve_ps fortran routine work.\n boost_p = boost_momenta(p, 3)\n results.append(('Z-axis boost',\n evaluator.evaluate_matrix_element(matrix_element, options=options,\n p=boost_p, PS_name='zBoost', output='jamp',MLOptions = MLOptions)))\n # We also add here the boosts along x and y for reference. In the output\n # of the check, it is clearly stated that the MadLoop improve_ps script\n # will not work for them. The momenta read from an event file are not\n # precise enough, so these x/yBoost checks are omitted in that case.\n if not options['events']:\n boost_p = boost_momenta(p, 1)\n results.append(('X-axis boost',\n evaluator.evaluate_matrix_element(matrix_element, options=options,\n p=boost_p, PS_name='xBoost', output='jamp',MLOptions = MLOptions)))\n boost_p = boost_momenta(p, 2)\n results.append(('Y-axis boost',\n evaluator.evaluate_matrix_element(matrix_element,options=options,\n p=boost_p, PS_name='yBoost', output='jamp',MLOptions = MLOptions)))\n # We only consider the rotations around the z axis so as to have the\n # improve_ps fortran routine work.\n rot_p = [[pm[0],-pm[2],pm[1],pm[3]] for pm in p]\n results.append(('Z-axis pi/2 rotation',\n evaluator.evaluate_matrix_element(matrix_element,options=options,\n p=rot_p, PS_name='Rotation1', output='jamp',MLOptions = MLOptions)))\n # Now a pi/4 rotation around the z-axis\n sq2 = math.sqrt(2.0)\n rot_p = [[pm[0],(pm[1]-pm[2])/sq2,(pm[1]+pm[2])/sq2,pm[3]] for pm in p]\n results.append(('Z-axis pi/4 rotation',\n evaluator.evaluate_matrix_element(matrix_element,options=options,\n p=rot_p, PS_name='Rotation2', output='jamp',MLOptions = MLOptions)))\n \n \n return {'process': process, 'results': results}\n\n#===============================================================================\n# check_unitary_feynman\n#===============================================================================\ndef check_unitary_feynman(processes_unit, processes_feynm, param_card=None, \n options=None, tir={}, output_path=None,\n cuttools=\"\", reuse=False, cmd = FakeInterface()):\n \"\"\"Check gauge invariance of the processes by flipping\n the gauge of the model\n \"\"\"\n \n mg_root = cmd._mgme_dir\n \n cmass_scheme = cmd.options['complex_mass_scheme']\n \n if isinstance(processes_unit, base_objects.ProcessDefinition):\n # Generate a list of unique processes\n # Extract IS and FS ids\n multiprocess_unit = processes_unit\n model = multiprocess_unit.get('model')\n\n # Initialize matrix element evaluation\n # For the unitary gauge, open loops should not be used\n loop_optimized_bu = cmd.options['loop_optimized_output']\n if processes_unit.get('squared_orders'):\n if processes_unit.get('perturbation_couplings') in [[],['QCD']]:\n cmd.options['loop_optimized_output'] = True\n else:\n raise InvalidCmd(\"The gauge test cannot be performed for \"+\n \" a process with more than QCD corrections and which\"+\n \" specifies squared order constraints.\")\n else:\n cmd.options['loop_optimized_output'] = False\n \n aloha.unitary_gauge = True\n if processes_unit.get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,\n cmd=cmd,auth_skipping = False, reuse = True)\n else:\n evaluator = 
LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,\n cmd=cmd, model=model,\n param_card=param_card,\n auth_skipping = False, \n output_path=output_path,\n reuse = False)\n if not cmass_scheme and multiprocess_unit.get('perturbation_couplings')==[]:\n logger.info('Set all widths to zero for non complex mass scheme checks')\n for particle in evaluator.full_model.get('particles'):\n if particle.get('width') != 'ZERO':\n evaluator.full_model.get('parameter_dict')[particle.get('width')] = 0.\n\n output_u = run_multiprocs_no_crossings(get_value,\n multiprocess_unit,\n evaluator,\n options=options)\n \n clean_added_globals(ADDED_GLOBAL)\n # Clean up previous run if checking loop output\n if processes_unit.get('perturbation_couplings')!=[]:\n clean_up(output_path)\n\n momentum = {}\n for data in output_u:\n momentum[data['process']] = data['p']\n \n multiprocess_feynm = processes_feynm\n model = multiprocess_feynm.get('model')\n\n # Initialize matrix element evaluation\n aloha.unitary_gauge = False\n # We could use the default output as well for Feynman, but it provides\n # an additional check\n cmd.options['loop_optimized_output'] = True\n if processes_feynm.get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,\n cmd= cmd, auth_skipping = False, reuse = False)\n else:\n evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,\n cmd= cmd, model=model,\n param_card=param_card,\n auth_skipping = False, \n output_path=output_path,\n reuse = False)\n\n if not cmass_scheme and multiprocess_feynm.get('perturbation_couplings')==[]:\n # Set all widths to zero for gauge check\n for particle in evaluator.full_model.get('particles'):\n if particle.get('width') != 'ZERO':\n evaluator.full_model.get('parameter_dict')[particle.get('width')] = 0.\n\n output_f = run_multiprocs_no_crossings(get_value, multiprocess_feynm,\n evaluator, momentum,\n options=options) \n output = [processes_unit] \n for data in output_f:\n local_dico = {}\n local_dico['process'] = data['process']\n local_dico['value_feynm'] = data['value']\n local_dico['value_unit'] = [d['value'] for d in output_u \n if d['process'] == data['process']][0]\n output.append(local_dico)\n \n if processes_feynm.get('perturbation_couplings')!=[] and not reuse:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path)\n\n # Reset the original global variable loop_optimized_output.\n cmd.options['loop_optimized_output'] = loop_optimized_bu\n\n return output\n# elif isinstance(processes, base_objects.Process):\n# processes = base_objects.ProcessList([processes])\n# elif isinstance(processes, base_objects.ProcessList):\n# pass\n else:\n raise InvalidCmd(\"processes is of non-supported format\")\n\n#===============================================================================\n# check_cms\n#===============================================================================\ndef check_complex_mass_scheme(process_line, param_card=None, cuttools=\"\",tir={}, \n cmd = FakeInterface(), output_path=None, MLOptions = {}, options={}):\n \"\"\"Check complex mass scheme consistency in the offshell region of s-channels\n detected for this process, by varying the expansion parameter consistently\n with the corresponding width and making sure that the difference between\n the complex mass-scheme and the narrow-width approximation is higher order.\n \"\"\"\n\n if not isinstance(process_line, str):\n raise InvalidCmd(\"Process definition must be given as a string for this 
check\")\n\n # Generate a list of unique processes in the NWA scheme\n cmd.do_set('complex_mass_scheme False', log=False)\n #cmd.do_import('model loop_qcd_qed_sm-NWA')\n multiprocess_nwa = cmd.extract_process(process_line)\n\n # Change the option 'recompute_width' to the optimal value if set to 'auto'.\n has_FRdecay = os.path.isfile(pjoin(cmd._curr_model.get('modelpath'),\n 'decays.py'))\n\n # Proceed with some warning\n missing_perturbations = cmd._curr_model.get_coupling_orders()-\\\n set(multiprocess_nwa.get('perturbation_couplings'))\n\n if len(multiprocess_nwa.get('perturbation_couplings'))>0 and \\\n len(missing_perturbations)>0:\n logger.warning(\"------------------------------------------------------\")\n logger.warning(\"The process considered does not specify the following \"+\n \"type of loops to be included : %s\"%str(list(missing_perturbations)))\n logger.warning(\"Consequently, the CMS check will be unsuccessful if the\"+\n \" process involves any resonating particle whose LO decay is \"+\n \"mediated by one of these orders.\")\n logger.warning(\"You can use the syntax '[virt=all]' to automatically\"+\n \" include all loops supported by the model.\")\n logger.warning(\"------------------------------------------------------\")\n\n if len(multiprocess_nwa.get('perturbation_couplings'))>0 and \\\n len(multiprocess_nwa.get('legs'))<=4:\n logger.warning(\"------------------------------------------------------\")\n logger.warning(\"Processes with four or less external states are typically not\"+\\\n \" sensitive to incorrect Complex Mass Scheme implementations.\")\n logger.warning(\"You can test this sensitivity by making sure that the\"+\n \" same check on the leading-order counterpart of this process *fails*\"+\n \" when using the option '--diff_lambda_power=2'.\")\n logger.warning(\"If it does not, then consider adding a massless \"+\n \"gauge vector to the external states.\")\n logger.warning(\"------------------------------------------------------\")\n\n if options['recompute_width']=='auto':\n if multiprocess_nwa.get('perturbation_couplings')!=[]:\n # NLO, so it is necessary to have the correct LO width for the check\n options['recompute_width'] = 'first_time'\n else:\n options['recompute_width'] = 'never'\n\n # Some warnings\n if options['recompute_width'] in ['first_time', 'always'] and \\\n not has_FRdecay and not 'cached_widths' in options:\n logger.info('The LO widths will need to be recomputed but the '+\n 'model considered does not appear to have a decay module.\\nThe widths'+\n ' will need to be computed numerically and it will slow down the test.\\n'+\n 'Consider using a param_card already specifying correct LO widths and'+\n \" adding the option --recompute_width=never when doing this check.\")\n\n if options['recompute_width']=='never' and \\\n any(order in multiprocess_nwa.get('perturbation_couplings') for order in\n options['expansion_orders']):\n logger.warning('You chose not to recompute the widths while including'+\n ' loop corrections. The check will be successful only if the width'+\\\n ' specified in the default param_card is LO accurate (Remember that'+\\\n ' the default values of alpha_s and awem1 are set to 0.1 and 10.0'+\\\n ' respectively by default).')\n\n # Reload the model including the decay.py to have efficient MadWidth if\n # possible (this model will be directly given to MadWidth. 
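# --- Illustrative aside (not part of the original module) --------------------
# A minimal sketch of the width-caching strategy this check relies on: widths
# computed once per particle are stored keyed by (pdg, lambdaCMS) and, when
# 'recompute_width' is 'never' or 'first_time', the LO width is assumed to
# scale linearly with the expansion parameter lambdaCMS (as done by get_width
# further below). The names here are hypothetical; the real cache lives in
# options['cached_widths'].

def fill_width_cache(cache, pdg, ref_lambda, ref_width, lambda_values):
    """Populate `cache` assuming Gamma(lambda) = Gamma(ref) * lambda/ref."""
    for lam in lambda_values:
        cache[(pdg, lam)] = ref_width * (lam / ref_lambda)
    return cache

if __name__ == '__main__':
    cache = {}
    # e.g. a W-like particle (pdg 24) with a 2.05 GeV width at lambda = 1
    fill_width_cache(cache, 24, 1.0, 2.05, [1.0, 0.1, 0.01])
    print(cache[(24, 0.01)])  # -> 0.0205
# ------------------------------------------------------------------------------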
Notice that \n # this will not be needed for the CMS run because MadWidth is not supposed\n # to be used there (the widths should be recycled from those of the NWA run).\n if options['recompute_width'] in ['first_time', 'always'] and has_FRdecay:\n modelname = cmd._curr_model.get('modelpath+restriction')\n with misc.MuteLogger(['madgraph'], ['INFO']):\n model = import_ufo.import_model(modelname, decay=True,\n complex_mass_scheme=False)\n multiprocess_nwa.set('model', model)\n\n run_options = copy.deepcopy(options)\n\n # Set the seed if chosen by user\n if options['seed'] > 0:\n random.seed(options['seed'])\n \n # Add useful entries\n run_options['param_card'] = param_card\n if isinstance(cmd, FakeInterface):\n raise MadGraph5Error(\"Check CMS cannot be run with a FakeInterface.\")\n run_options['cmd'] = cmd\n run_options['MLOptions'] = MLOptions\n if output_path:\n run_options['output_path'] = output_path\n else:\n run_options['output_path'] = cmd._mgme_dir\n \n # Add the information regarding FR decay for optimal log information\n run_options['has_FRdecay'] = has_FRdecay\n\n # And one for caching the widths computed along the way\n if 'cached_widths' not in run_options:\n run_options['cached_widths'] = {}\n # Cached param_cards, first is param_card instance, second is\n # param_name dictionary\n run_options['cached_param_card'] = {'NWA':[None,None],'CMS':[None,None]}\n\n if options['tweak']['name']:\n logger.info(\"Now running the CMS check for tweak '%s'\"\\\n %options['tweak']['name'])\n\n model = multiprocess_nwa.get('model')\n # Make sure all masses are defined as external\n for particle in model.get('particles'):\n mass_param = model.get_parameter(particle.get('mass'))\n if particle.get('mass')!='ZERO' and 'external' not in mass_param.depend:\n if model.get('name') not in ['sm','loop_sm']:\n logger.warning(\"The mass '%s' of particle '%s' is not an external\"%\\\n (model.get_parameter(particle.get('mass')).name,particle.get('name'))+\\\n \" parameter as required by this check. \\nMG5_aMC will try to\"+\\\n \" modify the model to remedy the situation. No guarantee.\")\n status = model.change_electroweak_mode(set(['mz','mw','alpha']))\n if not status:\n raise InvalidCmd('The EW scheme could apparently not be changed'+\\\n ' so as to have the W-boson mass external. The check cannot'+\\\n ' proceed.')\n break\n\n veto_orders = [order for order in model.get('coupling_orders') if \\\n order not in options['expansion_orders']]\n if len(veto_orders)>0:\n logger.warning('You did not define any parameter scaling rule for the'+\\\n \" coupling orders %s. They will be \"%','.join(veto_orders)+\\\n \"forced to zero in the tests. Consider adding the scaling rule to\"+\\\n \"avoid this. 
(see option '--cms' in 'help check')\")\n for order in veto_orders:\n multiprocess_nwa.get('orders')[order] = 0\n multiprocess_nwa.set('perturbation_couplings', [order for order in\n multiprocess_nwa['perturbation_couplings'] if order not in veto_orders])\n\n if multiprocess_nwa.get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,\n cmd=cmd,auth_skipping = False, reuse = True)\n else:\n evaluator = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,\n cmd=cmd, model=model,\n param_card=param_card,\n auth_skipping = False, \n output_path=output_path,\n reuse = False)\n\n cached_information = []\n output_nwa = run_multiprocs_no_crossings(check_complex_mass_scheme_process,\n multiprocess_nwa,\n evaluator,\n # This empty list 'opt' will be passed to the check_complex_mass_scheme_process\n # function which will fill it with the specification of the particle for which\n # the complex mass scheme must be checked. The fact that it is a list\n # at this stage tells the function check_complex_mass_scheme_process that\n # we are doing NWA. It will then be converted to a dictionary when doing CMS.\n opt = cached_information,\n options=run_options)\n\n # Make sure to start fresh for LO runs\n clean_added_globals(ADDED_GLOBAL)\n\n # Generate a list of unique processes in the CMS scheme\n cmd.do_set('complex_mass_scheme True', log=False)\n #cmd.do_import('model loop_qcd_qed_sm__CMS__-CMS')\n\n multiprocess_cms = cmd.extract_process(process_line) \n model = multiprocess_cms.get('model')\n # Apply veto\n if len(veto_orders)>0:\n for order in veto_orders:\n multiprocess_cms.get('orders')[order] = 0\n multiprocess_cms.set('perturbation_couplings', [order for order in\n multiprocess_cms['perturbation_couplings'] if order not in veto_orders])\n \n if multiprocess_cms.get('perturbation_couplings')==[]:\n evaluator = MatrixElementEvaluator(model, param_card,\n cmd=cmd,auth_skipping = False, reuse = True)\n else:\n evaluator = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,\n cmd=cmd, model=model,\n param_card=param_card,\n auth_skipping = False, \n output_path=output_path,\n reuse = False)\n\n output_cms = run_multiprocs_no_crossings(check_complex_mass_scheme_process,\n multiprocess_cms,\n evaluator,\n # We now substitute the cached information\n opt = dict(cached_information),\n options=run_options)\n\n if multiprocess_cms.get('perturbation_couplings')!=[] and not options['reuse']:\n # Clean temporary folders created for the running of the loop processes\n clean_up(output_path)\n\n # Now reformat a bit the output by putting the CMS and NWA results together\n # as values of a dictionary with the process name as key. 
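# --- Illustrative aside (not part of the original module) --------------------
# A minimal sketch of the merging step described in the comment above: pair up
# the per-process NWA and CMS outputs (assumed to arrive in the same order)
# into one dictionary keyed by process name, plus an 'ordered_processes' list.
# The (name, payload) tuple shapes mirror the pairs used below; the function
# name is hypothetical.

def merge_nwa_cms(output_nwa, output_cms):
    result = {'ordered_processes': []}
    for (name, nwa_payload), (_, cms_payload) in zip(output_nwa, output_cms):
        result['ordered_processes'].append(name)
        result[name] = {'NWA': nwa_payload, 'CMS': cms_payload}
    return result

if __name__ == '__main__':
    merged = merge_nwa_cms([('p1', {'res': 1})], [('p1', {'res': 2})])
    print(merged['ordered_processes'], merged['p1']['CMS'])  # ['p1'] {'res': 2}
# ------------------------------------------------------------------------------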
\n # Also a 'processes_order' to list all processes in their order of appearance\n result = {'ordered_processes':[],'lambdaCMS':options['lambdaCMS']}\n # Recall what perturbation orders were used\n result['perturbation_orders']=multiprocess_nwa.get('perturbation_couplings')\n for i, proc_res in enumerate(output_nwa):\n result['ordered_processes'].append(proc_res[0])\n result[proc_res[0]] = {\n 'NWA':proc_res[1]['resonances_result'],\n 'CMS':output_cms[i][1]['resonances_result'],\n 'born_order':proc_res[1]['born_order'],\n 'loop_order':proc_res[1]['loop_order']}\n \n # As an optimization we propagate the widths as they could be reused when\n # using several tweaks\n options['cached_widths'] = run_options['cached_widths']\n \n # Add widths information to the result\n result['recompute_width'] = options['recompute_width']\n result['has_FRdecay'] = has_FRdecay\n result['widths_computed'] = []\n cached_widths = sorted(list(options['cached_widths'].items()), key=lambda el: \\\n abs(el[0][0]))\n for (pdg, lambda_value), width in cached_widths:\n if lambda_value != 1.0:\n continue\n result['widths_computed'].append((model.get_particle(pdg).get_name(),\n width))\n \n # Make sure to clear the python ME definitions generated in LO runs\n clean_added_globals(ADDED_GLOBAL)\n \n return result\n\n\n# Check CMS for a given process\ndef check_complex_mass_scheme_process(process, evaluator, opt = [], \n options=None):\n \"\"\"Check CMS for the process in argument. The options 'opt' is quite important.\n When opt is a list, it means that we are doing NWA and we are filling the\n list with the following tuple \n ('proc_name',({'ParticlePDG':ParticlePDG,\n 'FinalStateMothersNumbers':set([]), \n 'PS_point_used':[]},...))\n When opt is a dictionary, we are in the CMS mode and it will be reused then.\n \"\"\"\n\n # a useful logical to check if we are in LO (python on the flight) or \n # NLO (output and compilation) mode\n NLO = process.get('perturbation_couplings') != []\n \n def glue_momenta(production, decay):\n \"\"\" Merge together the kinematics for the production of particle \n positioned last in the 'production' array with the 1>N 'decay' kinematic' \n provided where the decay particle is first.\"\"\"\n \n from MadSpin.decay import momentum\n \n full = production[:-1]\n \n # Consistency check:\n # target = production[decay_number-1]\n # boosted = momentum(decay[0][0],decay[0][1],decay[0][2],decay[0][3])\n # print 'Consistency check ',target==boosted\n for p in decay[1:]:\n bp = momentum(*p).boost(momentum(*production[-1]))\n full.append([bp.E,bp.px,bp.py,bp.pz])\n \n return full\n \n def find_resonances(diagrams):\n \"\"\" Find all the resonances in the matrix element in argument \"\"\"\n \n model = process['model']\n resonances_found = []\n\n for ll, diag in enumerate(diagrams):\n for amp in diag.get('amplitudes'):\n # 0 specifies the PDG given to the fake s-channels from \n # vertices with more than four legs\n s_channels, t_channels = amp.\\\n get_s_and_t_channels(process.get_ninitial(), model, 0)\n # The s-channel are given from the outmost ones going inward as\n # vertices, so we must replace parent legs with the outermost ones\n replacement_dict = {}\n for s_channel in s_channels:\n new_resonance = {\n 'ParticlePDG':s_channel.get('legs')[-1].get('id'),\n 'FSMothersNumbers':[],\n 'PS_point_used':[]}\n for leg in s_channel.get('legs')[:-1]:\n if leg.get('number')>0:\n new_resonance['FSMothersNumbers'].append(\n leg.get('number'))\n else:\n try:\n new_resonance['FSMothersNumbers'].extend(\n 
replacement_dict[leg.get('number')])\n except KeyError:\n raise Exception('The following diagram '+\\\n 'is malformed:'+diag.nice_string())\n \n replacement_dict[s_channel.get('legs')[-1].get('number')] = \\\n new_resonance['FSMothersNumbers']\n new_resonance['FSMothersNumbers'] = set(\n new_resonance['FSMothersNumbers'])\n if new_resonance not in resonances_found:\n resonances_found.append(new_resonance)\n\n # Now we set up the phase-space point for each resonance found\n kept_resonances = []\n for resonance in resonances_found:\n # Discard fake s-channels\n if resonance['ParticlePDG'] == 0:\n continue\n\n # Discard if the particle appears in the final state\n if abs(resonance['ParticlePDG']) in \\\n [abs(l.get('id')) for l in process.get('legs')]:\n continue\n\n mass_string = evaluator.full_model.get_particle(\n resonance['ParticlePDG']).get('mass')\n mass = evaluator.full_model.get('parameter_dict')[mass_string].real\n # Discard massless s-channels\n if mass==0.0:\n continue\n \n width_string = evaluator.full_model.get_particle(\n resonance['ParticlePDG']).get('width')\n width = evaluator.full_model.get('parameter_dict')[width_string].real\n\n # Discard stable s-channels\n if width==0.0:\n continue\n\n final_state_energy = sum(\n evaluator.full_model.get('parameter_dict')[\n evaluator.full_model.get_particle(l.get('id')).get('mass')].real\n for l in process.get('legs') if l.get('number') in \n resonance['FSMothersNumbers'])\n \n # Choose the offshellness\n special_mass = (1.0 + options['offshellness'])*mass\n \n # Discard impossible kinematics\n if special_mass<final_state_energy:\n raise InvalidCmd('The offshellness specified (%s) is such'\\\n %options['offshellness']+' that the resulting kinematic is '+\\\n 'impossible for resonance %s %s.'%(evaluator.full_model.\n get_particle(resonance['ParticlePDG']).get_name(),\n str(list(resonance['FSMothersNumbers']))))\n \n # Add it to the list of accepted resonances\n kept_resonances.append(resonance)\n \n for resonance in kept_resonances:\n # Choose the PS point for the resonance\n set_PSpoint(resonance, force_other_res_offshell=kept_resonances)\n\n return tuple(kept_resonances)\n\n def set_PSpoint(resonance, force_other_res_offshell=[], \n allow_energy_increase=1.5, isolation_cuts=True):\n \"\"\" Starting from the specified resonance, construct a phase space point\n for it and possibly also enforce other resonances to be offshell. Possibly\n allow to progressively increase the energy by steps of the factor specified\n (negative float to forbid it) and possibly enforce default isolation cuts\n as well.\"\"\"\n \n def invmass(momenta):\n \"\"\" Computes the invariant mass of a list of momenta.\"\"\"\n ptot = [sum(p[i] for p in momenta) for i in range(4)]\n return math.sqrt(ptot[0]**2-ptot[1]**2-ptot[2]**2-ptot[3]**2)\n \n model = evaluator.full_model\n def getmass(pdg):\n \"\"\" Returns the mass of a particle given the current model and its\n pdg given in argument.\"\"\"\n return model.get('parameter_dict')[\n model.get_particle(pdg).get('mass')].real\n\n N_trials = 0\n max_trial = 1e4\n nstep_for_energy_increase = 1e3\n PS_point_found = None\n if options['offshellness'] > 0.0:\n offshellness = options['offshellness']\n else:\n # We must undershoot the offshellness since one needs more \n # energy than the target mass to have a valid PS point. 
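# --- Illustrative aside (not part of the original module) --------------------
# A minimal sketch of the 'offshellness' bookkeeping used throughout this
# function: a resonance of mass m is probed at the shifted invariant mass
# M* = (1 + offshellness) * m, and conversely the offshellness realised by a
# set of daughter momenta is M_inv/m - 1. The helper name is hypothetical.

import math

def realised_offshellness(momenta, mass):
    """Offshellness of the propagator reconstructed from `momenta`."""
    tot = [sum(p[i] for p in momenta) for i in range(4)]
    m_inv = math.sqrt(tot[0] ** 2 - tot[1] ** 2 - tot[2] ** 2 - tot[3] ** 2)
    return m_inv / mass - 1.0

if __name__ == '__main__':
    # Two massless back-to-back daughters with invariant mass 100 coming
    # from a mass-80 resonance: offshellness = 100/80 - 1 = 0.25
    daughters = [[50.0, 0.0, 0.0, 50.0], [50.0, 0.0, 0.0, -50.0]]
    print('%.3f' % realised_offshellness(daughters, 80.0))  # -> 0.250
# ------------------------------------------------------------------------------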
So we\n # start with an offshellness 4 times larger in absolute value, and\n # progressively reduce it later\n offshellness = (0.25*(options['offshellness']+1.0))-1.0\n \n # When offshellness is negative, it is progressively decreased every\n # nstep_for_energy_increase attempts (not increased!), so it is more\n # dangerous, and we therefore want the steps to be smaller\n if options['offshellness'] < 0.0:\n energy_increase = math.sqrt(allow_energy_increase)\n else:\n energy_increase = allow_energy_increase\n # Make sure to remove the resonance itself from force_other_res_offshell\n other_res_offshell = [res for res in force_other_res_offshell if \n res!=resonance]\n\n # Now play it smart on finding starting energy and offshellness and \n # register all resonance masses\n all_other_res_masses = [getmass(res['ParticlePDG'])\n for res in other_res_offshell]\n resonance_mass = getmass(resonance['ParticlePDG'])\n\n str_res = '%s %s'%(model.get_particle(\n resonance['ParticlePDG']).get_name(),\n str(list(resonance['FSMothersNumbers'])))\n leg_number_to_leg = dict((l.get('number'),l) for l in process.get('legs'))\n # Find what is the minimum possible offshellness given\n # the mass of the daughters of this resonance.\n # This will only be relevant when options['offshellness'] is negative\n daughter_masses = sum(getmass(leg_number_to_leg[\\\n number].get('id')) for number in resonance['FSMothersNumbers'])\n min_offshellness = 4.0*((daughter_masses*1.2)/resonance_mass)-1.0\n\n # Compute the minimal energy given the external states, add 20% to leave\n # enough phase-space\n min_energy = max(sum(getmass(l.get('id')) for l in \\\n process.get('legs') if l.get('state')==True),\n sum(getmass(l.get('id')) for l in \\\n process.get('legs') if l.get('state')==False))\n \n # List all other offshellnesses of the potential daughters of this \n # resonance\n daughter_offshellnesses = [(1.0+options['offshellness'])*mass \n for i, mass in enumerate(all_other_res_masses) if \n other_res_offshell[i]['FSMothersNumbers'].issubset(\n resonance['FSMothersNumbers'])]\n \n if options['offshellness'] >= 0.0:\n \n if len(daughter_offshellnesses)>0:\n max_mass = max(daughter_offshellnesses)\n # A factor two to have enough phase-space\n offshellness = max(2.0*(max_mass/resonance_mass)-1.0,\n options['offshellness'])\n \n max_mass = max([(1.0+options['offshellness'])*mass for mass in \\\n all_other_res_masses]+[(1.0+offshellness)*resonance_mass])\n \n # Account for external_masses too\n # A factor two to have enough phase-space open\n target = max(min_energy*1.2,max_mass*2.0)\n if target > options['energy']:\n logger.warning(\"The user-defined energy %f seems \"%options['energy']+\n \" insufficient to reach the minimum propagator invariant mass \"+\n \"%f required for the chosen offshellness %f.\"%(max_mass,\n options['offshellness']) + \" Energy reset to %f.\"%target)\n options['energy'] = target\n \n else:\n if len(daughter_offshellnesses) > 0:\n min_mass = min(daughter_offshellnesses)\n # A factor one half to have enough phase-space\n offshellness = min(0.25*(min_mass/resonance_mass)-1.0,\n options['offshellness'])\n \n # Make sure the chosen offshellness leaves enough energy to produce\n # the daughter masses\n if (1.0+offshellness)*resonance_mass < daughter_masses*1.2:\n msg = 'The resonance %s cannot accommodate'%str_res+\\\n ' an offshellness of %f because the daughter'%options['offshellness']+\\\n ' masses are %f.'%daughter_masses\n if options['offshellness']<min_offshellness:\n msg += ' Try again with an offshellness'+\\\n ' smaller (in absolute value) of at least %f.'%min_offshellness\n else:\n msg += ' Try again with a smaller offshellness (in absolute value).'\n raise InvalidCmd(msg)\n \n min_mass = min([(1.0+options['offshellness'])*mass for mass in \\\n all_other_res_masses]+[(1.0+offshellness)*resonance_mass])\n # Account for external_masses too\n # A factor two to have enough phase-space open\n if 2.0*min_mass < options['energy']:\n new_energy = max(min_energy*1.2, 2.0*min_mass)\n logger.warning(\"The user-defined energy %f seems \"%options['energy']+\n \" too large to not overshoot the maximum propagator invariant mass \"+\n \"%f required for the chosen offshellness %f.\"%(min_mass,\n options['offshellness']) + \" Energy reset to %f.\"%new_energy)\n options['energy'] = new_energy \n \n if options['offshellness'] < 0.0 and options['energy'] >= min_mass:\n logger.debug(\"The target energy is not compatible with the mass\"+\n \" of the external states for this process (%f). It is \"%min_mass+\n \"unlikely that a valid kinematic configuration will be found.\")\n \n if options['offshellness']<0.0 and offshellness<options['offshellness'] or \\\n options['offshellness']>0.0 and offshellness>options['offshellness']:\n logger.debug(\"Offshellness adjusted to %f\"%offshellness+\n \" so as to try to find a kinematical configuration with\"+\n \" offshellness at least equal to %f\"%options['offshellness']+\n \" for all resonances.\")\n\n start_energy = options['energy'] \n while N_trials<max_trial:\n N_trials += 1\n if N_trials%nstep_for_energy_increase==0:\n if allow_energy_increase > 0.0:\n old_offshellness = offshellness\n if offshellness > 0.0:\n options['energy'] *= energy_increase\n offshellness *= energy_increase\n else:\n options['energy'] = max(options['energy']/energy_increase, \n min_energy*1.2)\n offshellness = max(min_offshellness,\n ((offshellness+1.0)/energy_increase)-1.0)\n if old_offshellness!=offshellness:\n logger.debug('Trying to find a valid kinematic'+\\\n \" configuration for resonance '%s'\"%str_res+\\\n ' with adjusted offshellness %f'%offshellness)\n\n candidate = get_PSpoint_for_resonance(resonance, offshellness)\n pass_offshell_test = True\n for i, res in enumerate(other_res_offshell):\n # Make sure other resonances are sufficiently offshell too\n if offshellness > 0.0:\n if invmass([candidate[j-1] for j in res['FSMothersNumbers']]) <\\\n ((1.0+options['offshellness'])*all_other_res_masses[i]):\n pass_offshell_test = False\n break\n else:\n if invmass([candidate[j-1] for j in res['FSMothersNumbers']]) >\\\n ((1.0+options['offshellness'])*all_other_res_masses[i]):\n pass_offshell_test = False\n break\n if not pass_offshell_test:\n continue\n # Make sure it is isolated\n if isolation_cuts:\n # Set ptcut to 5% of total energy\n if not evaluator.pass_isolation_cuts(candidate,\n ptcut=0.05*invmass([candidate[0],candidate[1]]), drcut=0.4):\n continue\n PS_point_found = candidate\n break\n \n # Restore the initial energy setup\n options['energy'] = start_energy\n\n if PS_point_found is None:\n err_msg = 'Could not find a valid PS point in %d'%max_trial+\\\n ' trials. 
Try increasing the energy, modify the offshellness '+\\\n 'or relax some constraints.'\n if options['offshellness']<0.0:\n err_msg +='Try with a positive offshellness instead (or a '+\\\n 'negative one of smaller absolute value)'\n raise InvalidCmd(err_msg)\n else:\n# misc.sprint('PS point found in %s trials.'%N_trials)\n# misc.sprint(PS_point_found)\n resonance['offshellnesses'] = []\n all_other_res_masses = [resonance_mass] + all_other_res_masses\n other_res_offshell = [resonance] + other_res_offshell\n for i, res in enumerate(other_res_offshell):\n if i==0:\n res_str = 'self'\n else:\n res_str = '%s %s'%(model.get_particle(\n res['ParticlePDG']).get_name(),\n str(list(res['FSMothersNumbers'])))\n resonance['offshellnesses'].append((res_str,(\n (invmass([PS_point_found[j-1] for j in \n res['FSMothersNumbers']])/all_other_res_masses[i])-1.0)))\n\n resonance['PS_point_used'] = PS_point_found\n \n def get_PSpoint_for_resonance(resonance, offshellness = options['offshellness']):\n \"\"\" Assigns a kinematic configuration to the resonance dictionary \n given in argument.\"\"\" \n\n # Get the particle mass\n mass_string = evaluator.full_model.get_particle(\n resonance['ParticlePDG']).get('mass')\n mass = evaluator.full_model.get('parameter_dict')[mass_string].real\n \n # Choose the offshellness\n special_mass = (1.0 + offshellness)*mass\n \n # Create a fake production and decay process\n prod_proc = base_objects.Process({'legs':base_objects.LegList(\n copy.copy(leg) for leg in process.get('legs') if \n leg.get('number') not in resonance['FSMothersNumbers'])})\n # Add the resonant particle as a final state\n # ID set to 0 since its mass will be forced\n # Number set so as to be first in the list in get_momenta\n prod_proc.get('legs').append(base_objects.Leg({\n 'number':max(l.get('number') for l in process.get('legs'))+1,\n 'state':True,\n 'id':0}))\n # now the decay process\n decay_proc = base_objects.Process({'legs':base_objects.LegList(\n copy.copy(leg) for leg in process.get('legs') if leg.get('number') \n in resonance['FSMothersNumbers'] and not leg.get('state')==False)})\n # Add the resonant particle as an initial state\n # ID set to 0 since its mass will be forced\n # Number set to -1 as well so as to be sure it appears first in \n # get_momenta\n decay_proc.get('legs').insert(0,base_objects.Leg({\n 'number':-1,\n 'state':False,\n 'id':0}))\n prod_kinematic = evaluator.get_momenta(prod_proc, options=options,\n special_mass=special_mass)[0]\n decay_kinematic = evaluator.get_momenta(decay_proc, options=options, \n special_mass=special_mass)[0]\n momenta = glue_momenta(prod_kinematic,decay_kinematic)\n # Reshuffle the momentum so as to put it back in the order specified\n # in the process definition.\n # First the production momenta, without the special decayed particle\n ordered_momenta = [(prod_proc.get('legs')[i].get('number'),momenta[i])\n for i in range(len(prod_proc.get('legs'))-1)]\n # And then the decay ones.\n ordered_momenta += [(decay_proc.get('legs')[-i].get('number'),\n momenta[-i]) for i in range(1,len(decay_proc.get('legs')))]\n\n # Return the PSpoint found in the right order\n return [m[1] for m in sorted(ordered_momenta, key = lambda el: el[0])]\n \n # misc.sprint(resonance['PS_point_used']) \n \n @misc.mute_logger()\n def get_width(PDG, lambdaCMS, param_card):\n \"\"\" Returns the width to use for particle with absolute PDG 'PDG' and\n for the the lambdaCMS value 'lambdaCMS' using the cache if possible.\"\"\"\n\n # If an unstable particle is in the external state, then set 
its width\n # to zero and don't cache the result of course.\n if abs(PDG) in [abs(leg.get('id')) for leg in process.get('legs')]:\n return 0.0\n\n particle = evaluator.full_model.get_particle(PDG)\n \n # If it is a goldstone or a ghost, return zero as its width should anyway\n # not be independent.\n if particle.get('ghost') or particle.get('goldstone'):\n return 0.0\n\n # If its width is analytically set to zero, then return zero right away\n if particle.get('width')=='ZERO':\n return 0.0\n \n if (PDG,lambdaCMS) in options['cached_widths']:\n return options['cached_widths'][(PDG,lambdaCMS)]\n\n if options['recompute_width'] == 'never':\n width = evaluator.full_model.\\\n get('parameter_dict')[particle.get('width')].real\n else:\n # Crash if we are doing CMS and the width was not found and recycled above\n if aloha.complex_mass:\n raise MadGraph5Error(\"The width for particle with PDG %d and\"%PDG+\\\n \" lambdaCMS=%f should have already been \"%lambdaCMS+\\\n \"computed during the NWA run.\")\n\n # Use MadWidth\n if options['recompute_width'] in ['always','first_time']:\n particle_name = particle.get_name()\n with misc.TMP_directory(dir=options['output_path']) as path:\n param_card.write(pjoin(path,'tmp.dat'))\n # 2-body decay is the maximum that should be considered for NLO check.\n # The default 1% accuracy is not enough when pushing to small\n # lambdaCMS values, we need 1 per mil at least.\n command = '%s --output=%s'%(particle_name,pjoin(path,'tmp.dat'))+\\\n ' --path=%s --body_decay=2'%pjoin(path,'tmp.dat')+\\\n ' --precision_channel=0.001'\n# misc.sprint(command)\n param_card.write(pjoin(options['output_path'],'tmp.dat'))\n # The MG5 command get_width will change the cmd._curr_model\n # and the cmd._curr_fortran_model from what we specified, so \n # we must make sure to restore them after it finishes\n orig_model = options['cmd']._curr_model\n orig_helas_model = options['cmd']._curr_helas_model\n options['cmd'].do_compute_widths(command, evaluator.full_model)\n # Restore the models\n options['cmd']._curr_model = orig_model\n options['cmd']._curr_helas_model = orig_helas_model\n # Restore the width of the model passed in argument since\n # MadWidth will automatically update the width\n evaluator.full_model.set_parameters_and_couplings(\n param_card=param_card)\n try:\n tmp_param_card = check_param_card.ParamCard(pjoin(path,'tmp.dat'))\n except:\n raise MadGraph5Error('Error occurred during width '+\\\n 'computation with command:\\n compute_widths %s'%command) \n width = tmp_param_card['decay'].get(PDG).value\n# misc.sprint('lambdaCMS checked is', lambdaCMS,\n# 'for particle',particle_name)\n# misc.sprint('Width obtained :', width)\n# if lambdaCMS != 1.0:\n# misc.sprint('Naively expected (lin. scaling) :',\n# options['cached_widths'][(PDG,1.0)]*lambdaCMS)\n \n if options['recompute_width'] in ['never','first_time']:\n # Assume linear scaling of the width\n for lam in options['lambdaCMS']:\n options['cached_widths'][(PDG,lam)]=width*(lam/lambdaCMS)\n else:\n options['cached_widths'][(PDG,lambdaCMS)] = width\n \n return options['cached_widths'][(PDG,lambdaCMS)]\n \n def get_order(diagrams, diagsName):\n \"\"\"Compute the common sum of coupling orders used for this CMS check\n in the diagrams specified. 
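# --- Illustrative aside (not part of the original module) --------------------
# A minimal sketch of the order bookkeeping performed by get_order below:
# every diagram reports its coupling orders as a dict (e.g. {'QCD': 2,
# 'QED': 1}); only the orders listed in `expansion_orders` are summed, and if
# the diagrams disagree the smallest sum is kept after a warning. The names
# are hypothetical.

def common_expansion_order(diagram_orders, expansion_orders):
    sums = {sum(d.get(o, 0) for o in expansion_orders)
            for d in diagram_orders}
    if len(sums) > 1:
        print('Warning: inconsistent expansion orders found: %s' % sums)
    return min(sums)

if __name__ == '__main__':
    diags = [{'QCD': 2, 'QED': 1}, {'QCD': 0, 'QED': 3}]
    print(common_expansion_order(diags, ['QED']))  # warns, then -> 1
# ------------------------------------------------------------------------------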
When inconsistency occurs, use orderName\n in the warning message if throwm.\"\"\"\n \n orders = set([])\n for diag in diagrams:\n diag_orders = diag.calculate_orders()\n orders.add(sum((diag_orders[order] if order in diag_orders else 0)\n for order in options['expansion_orders']))\n if len(orders)>1:\n logger.warning(msg%('%s '%diagsName,str(orders)))\n return min(list(orders))\n else:\n return list(orders)[0]\n \n MLoptions = copy.copy(options['MLOptions'])\n # Make sure double-check helicities is set to False\n MLoptions['DoubleCheckHelicityFilter'] = False\n \n # Apply the seed tweak if present\n for tweak in options['tweak']['custom']:\n if tweak.startswith('seed'):\n try:\n new_seed = int(tweak[4:])\n except ValueError:\n raise MadGraph5Error(\"Seed '%s' is not of the right format 'seed<int>'.\"%tweak)\n random.seed(new_seed)\n \n mode = 'CMS' if aloha.complex_mass else 'NWA'\n for i, leg in enumerate(process.get('legs')):\n leg.set('number', i+1)\n\n logger.info(\"Running CMS check for process %s (now doing %s scheme)\" % \\\n ( process.nice_string().replace('Process:', 'process'), mode))\n \n proc_dir = None\n resonances = None\n warning_msg = \"All %sdiagrams do not share the same sum of orders \"+\\\n \"%s; found %%s.\"%(','.join(options['expansion_orders']))+\\\n \" This potentially problematic for the CMS check.\"\n if NLO:\n # We must first create the matrix element, export it and set it up.\n # If the reuse option is specified, it will be recycled.\n \n if options['name']=='auto':\n proc_name = \"%s%s_%s%s__%s__\"%(('SAVED' if options['reuse'] else ''),\n temp_dir_prefix, '_'.join(process.shell_string().split('_')[1:]), \n ('_' if process.get('perturbation_couplings') else '')+\n '_'.join(process.get('perturbation_couplings')),mode)\n else:\n proc_name = \"%s%s_%s__%s__\"%(('SAVED' if options['reuse'] else ''),\n temp_dir_prefix,options['name'], mode)\n # Generate the ME\n timing, matrix_element = generate_loop_matrix_element(process, \n options['reuse'], output_path=options['output_path'], \n cmd = options['cmd'], proc_name=proc_name, \n loop_filter=options['loop_filter'])\n if matrix_element is None:\n # No diagrams for this process\n return None\n\n reusing = isinstance(matrix_element, base_objects.Process)\n proc_dir = pjoin(options['output_path'],proc_name)\n\n # Export the ME\n infos = evaluator.setup_process(matrix_element, proc_dir, \n reusing = reusing, param_card = options['param_card'], \n MLOptions=MLoptions)\n # Make sure the right MLoptions are set\n evaluator.fix_MadLoopParamCard(pjoin(proc_dir,'Cards'),\n mp = None, loop_filter = True,MLOptions=MLoptions)\n \n # Make sure to start from fresh if previous run was stopped\n tmp_card_backup = pjoin(proc_dir,'Cards','param_card.dat__TemporaryBackup__')\n if os.path.isfile(tmp_card_backup):\n # Run was stopped mid-way, we must then restore the original card\n logger.info(\"Last run in process '%s' apparently aborted.\"%proc_dir+\\\n \" Now reverting 'param_card.dat' to its original value.\")\n shutil.copy(tmp_card_backup, pjoin(proc_dir, 'Cards','param_card.dat'))\n else:\n # Create a temporary backup which will be cleaned if the run ends properly\n shutil.copy(pjoin(proc_dir,'Cards','param_card.dat'), tmp_card_backup)\n # Now do the same with model_functions.f\n tmp_modelfunc_backup = pjoin(proc_dir,'Source','MODEL',\n 'model_functions.f__TemporaryBackup__')\n if os.path.isfile(tmp_modelfunc_backup):\n # Run was stopped mid-way, we must then restore the model functions\n logger.info(\"Last run in process '%s' 
apparently aborted.\"%proc_dir+\\\n \" Now reverting 'model_functions.f' to its original value.\")\n shutil.copy(tmp_modelfunc_backup, pjoin(proc_dir,'Source','MODEL',\n 'model_functions.f'))\n evaluator.apply_log_tweak(proc_dir, 'recompile')\n else:\n # Create a temporary backup which will be cleaned if the run ends properly\n shutil.copy(pjoin(proc_dir,'Source','MODEL','model_functions.f'),\n tmp_modelfunc_backup)\n\n # Make sure to setup correctly the helicity\n MadLoopInitializer.fix_PSPoint_in_check(pjoin(proc_dir,'SubProcesses'),\n read_ps = True, npoints = 1, hel_config = options['helicity'], \n split_orders=options['split_orders'])\n \n # And recompile while making sure to recreate the executable and \n # modified sources\n for dir in misc.glob('P*_*', pjoin(proc_dir,'SubProcesses')):\n if not (re.search(r'.*P\\d+_\\w*$', dir) or not os.path.isdir(dir)):\n continue\n try:\n os.remove(pjoin(dir,'check'))\n os.remove(pjoin(dir,'check_sa.o'))\n except OSError:\n pass\n # Now run make\n with open(os.devnull, 'w') as devnull:\n retcode = subprocess.call(['make','check'],\n cwd=dir, stdout=devnull, stderr=devnull) \n if retcode != 0:\n raise MadGraph5Error(\"Compilation error with \"+\\\n \"'make check' in %s\"%dir)\n\n # Now find all the resonances of the ME, if not saved from a previous run\n pkl_path = pjoin(proc_dir,'resonance_specs.pkl')\n if reusing:\n # We recover the information from the pickle dumped during the\n # original run\n if not os.path.isfile(pkl_path):\n raise InvalidCmd('The folder %s could'%proc_dir+\\\n \" not be reused because the resonance specification file \"+\n \"'resonance_specs.pkl' is missing.\")\n else:\n proc_name, born_order, loop_order, resonances = \\\n save_load_object.load_from_file(pkl_path)\n # Make sure to rederive the phase-space point since parameters\n # such as masses, seed, offshellness could have affected it\n for res in resonances:\n set_PSpoint(res, force_other_res_offshell=resonances)\n\n # Second run (CMS), we can reuse the information if it is a dictionary\n if isinstance(opt, list):\n opt.append((proc_name, resonances))\n else:\n resonances = opt\n else:\n helas_born_diagrams = matrix_element.get_born_diagrams()\n if len(helas_born_diagrams)==0:\n logger.warning('The CMS check for loop-induced process is '+\\\n 'not yet available (nor is it very interesting).')\n return None\n born_order = get_order(helas_born_diagrams,'Born')\n loop_order = get_order(matrix_element.get_loop_diagrams(),'loop')\n\n # Second run (CMS), we can reuse the information if it is a dictionary\n if isinstance(opt, list):\n opt.append((process.base_string(),find_resonances(helas_born_diagrams)))\n resonances = opt[-1][1]\n else:\n resonances = opt\n # Save the resonances to a pickle file in the output directory so that\n # it can potentially be reused.\n save_load_object.save_to_file(pkl_path, (process.base_string(),\n born_order, loop_order,resonances))\n\n else:\n # The LO equivalent\n try:\n amplitude = diagram_generation.Amplitude(process)\n except InvalidCmd:\n logging.info(\"No diagrams for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n return None\n if not amplitude.get('diagrams'):\n # This process has no diagrams; go to next process\n logging.info(\"No diagrams for %s\" % \\\n process.nice_string().replace('Process', 'process'))\n return None\n\n matrix_element = helas_objects.HelasMatrixElement(amplitude,\n gen_color=True)\n diagrams = matrix_element.get('diagrams')\n born_order = get_order(diagrams,'Born')\n # Loop order set to -1 
indicates an LO result\n        loop_order = -1\n        # Find all the resonances of the ME, if not already given in opt\n        if isinstance(opt, list):\n            opt.append((process.base_string(),find_resonances(diagrams)))\n            resonances = opt[-1][1]\n        else:\n            resonances = opt\n    \n    if len(resonances)==0:\n        logger.info(\"No resonance found for process %s.\"\\\n                                                     %process.base_string())\n        return None\n    \n    # Cache the default param_card for NLO\n    if not options['cached_param_card'][mode][0]:\n        if NLO:\n            param_card = check_param_card.ParamCard(\n                                   pjoin(proc_dir,'Cards','param_card.dat'))\n        else:\n            param_card = check_param_card.ParamCard(\n                      StringIO.StringIO(evaluator.full_model.write_param_card()))\n        options['cached_param_card'][mode][0] = param_card\n        name2block, _ = param_card.analyze_param_card()\n        options['cached_param_card'][mode][1] = name2block\n    \n    else:\n        param_card = options['cached_param_card'][mode][0]\n        name2block = options['cached_param_card'][mode][1]\n\n    # Already add the coupling order for this squared ME.\n    if loop_order != -1 and (loop_order+born_order)%2 != 0:\n        raise MadGraph5Error('The summed squared matrix element '+\\\n                          \" order '%d' is not even.\"%(loop_order+born_order))\n    result = {'born_order':born_order, \n              'loop_order': (-1 if loop_order==-1 else (loop_order+born_order)/2),\n              'resonances_result':[]}\n\n    # Create a physical backup of the param_card\n    if NLO:\n        try:\n            shutil.copy(pjoin(proc_dir,'Cards','param_card.dat'),\n                            pjoin(proc_dir,'Cards','param_card.dat__backUp__'))\n        except:\n            pass\n\n    # Apply custom tweaks\n    had_log_tweaks=False\n    if NLO:\n        for tweak in options['tweak']['custom']:\n            if tweak.startswith('seed'):\n                continue\n            try:\n                logstart, logend = tweak.split('->')\n            except:\n                raise MadGraph5Error(\"Tweak '%s' not recognized.\"%tweak)\n            if logstart in ['logp','logm', 'log'] and \\\n               logend in ['logp','logm', 'log']:\n                if NLO:\n                    evaluator.apply_log_tweak(proc_dir, [logstart, logend])\n                    had_log_tweaks = True\n            else:\n                raise MadGraph5Error(\"Tweak '%s' not recognized.\"%tweak)\n        if had_log_tweaks:\n            evaluator.apply_log_tweak(proc_dir, 'recompile')\n\n    # Select what resonances should be run\n    if options['resonances']=='all':\n        resonances_to_run = resonances\n    elif isinstance(options['resonances'],int):\n        resonances_to_run = resonances[:options['resonances']] \n    elif isinstance(options['resonances'],list):\n        resonances_to_run = []\n        for res in resonances:\n            for res_selection in options['resonances']:\n                if abs(res['ParticlePDG'])==res_selection[0] and \\\n                             res['FSMothersNumbers']==set(res_selection[1]):\n                    resonances_to_run.append(res)\n                    break\n    else:\n        raise InvalidCmd(\"Resonance selection '%s' not recognized\"%\\\n                                                   str(options['resonances']))\n\n    # Display progressbar both for LO and NLO for now but not when not showing\n    # the plots\n    if NLO and options['show_plot']:\n        widgets = ['ME evaluations:', pbar.Percentage(), ' ', \n                                            pbar.Bar(),' ', pbar.ETA(), ' ']\n        progress_bar = pbar.ProgressBar(widgets=widgets, \n              maxval=len(options['lambdaCMS'])*len(resonances_to_run), fd=sys.stdout)\n        progress_bar.update(0)\n        # Flush stdout to force the progress_bar to appear\n        sys.stdout.flush()\n    else:\n        progress_bar = None\n\n    for resNumber, res in enumerate(resonances_to_run):\n        # First add a dictionary for this resonance to the result with already\n        # one key specifying the resonance\n        result['resonances_result'].append({'resonance':res,'born':[]})\n        if NLO:\n            result['resonances_result'][-1]['finite'] = []\n        # Now scan the different lambdaCMS values\n        for lambdaNumber, lambdaCMS in 
enumerate(options['lambdaCMS']):\n # Setup the model for that value of lambdaCMS\n # The copy constructor below creates a deep copy\n new_param_card = check_param_card.ParamCard(param_card)\n # Change all specified parameters\n for param, replacement in options['expansion_parameters'].items():\n # Replace the temporary prefix used for evaluation of the \n # substitution expression \n orig_param = param.replace('__tmpprefix__','')\n if orig_param not in name2block:\n # It can be that some parameter ar in the NWA model but not\n # in the CMS, such as the Yukawas for example.\n # logger.warning(\"Unknown parameter '%s' in mode '%s'.\"%(param,mode))\n continue\n for block, lhaid in name2block[orig_param]:\n orig_value = float(param_card[block].get(lhaid).value)\n new_value = eval(replacement,\n {param:orig_value,'lambdacms':lambdaCMS})\n new_param_card[block].get(lhaid).value=new_value\n\n # Apply these changes already (for the purpose of Width computation.\n # although it is optional since we now provide the new_param_card to\n # the width computation function.). Also in principle this matters\n # only in the CMS and there the widths would be reused from their \n # prior computation within NWA with zero widths. So, all in all,\n # the line below is really not crucial, but semantically, it ought\n # to be there.\n evaluator.full_model.set_parameters_and_couplings(\n param_card=new_param_card)\n # Now compute or recyle all widths\n for decay in new_param_card['decay'].keys():\n if mode=='CMS':\n new_width = get_width(abs(decay[0]), lambdaCMS, \n new_param_card)\n else:\n new_width = 0.0\n new_param_card['decay'].get(decay).value= new_width\n\n # Apply these changes for the purpose of the final computation\n evaluator.full_model.set_parameters_and_couplings(\n param_card=new_param_card)\n if NLO:\n new_param_card.write(pjoin(proc_dir,'Cards','param_card.dat'))\n # Write the recomputed widths so that it can potentially be\n # used for future runs (here for the model in the CMS format)\n if lambdaCMS==1.0 and mode=='CMS' and \\\n options['recompute_width'] in ['always','first_time']:\n new_param_card.write(pjoin(proc_dir,\n 'Cards','param_card.dat_recomputed_widths'))\n \n # If recomputing widths with MadWidths, we want to do it within\n # the NWA models with zero widths.\n if mode=='NWA' and (options['recompute_width']=='always' or (\n options['recompute_width']=='first_time' and lambdaCMS==1.0)):\n # The copy constructor below creates a deep copy\n tmp_param_card = check_param_card.ParamCard(new_param_card)\n # We don't use the result here, it is just so that it is put\n # in the cache and reused in the CMS run that follows.\n for decay in new_param_card['decay'].keys():\n particle_name = evaluator.full_model.get_particle(\\\n abs(decay[0])).get_name()\n new_width = get_width(abs(decay[0]),lambdaCMS,new_param_card)\n tmp_param_card['decay'].get(decay).value = new_width\n if not options['has_FRdecay'] and new_width != 0.0 and \\\n (abs(decay[0]),lambdaCMS) not in options['cached_widths']:\n logger.info('Numerically computed width of particle'+\\\n ' %s for lambda=%.4g : %-9.6gGeV'%\n (particle_name,lambdaCMS,new_width))\n\n # Write the recomputed widths so that it can potentially be\n # used for future runs (here the model in the NWA format)\n if lambdaCMS==1.0 and NLO:\n tmp_param_card.write(pjoin(proc_dir,\n 'Cards','param_card.dat_recomputed_widths'))\n \n # Apply the params tweaks\n for param, replacement in options['tweak']['params'].items():\n # Replace the temporary prefix used for 
evaluation of the \n # substitution expression \n orig_param = param.replace('__tmpprefix__','')\n # Treat the special keyword 'allwidths'\n if orig_param.lower() == 'allwidths':\n # Apply the rule to all widhts\n for decay in new_param_card['decay'].keys():\n orig_value = float(new_param_card['decay'].get(decay).value)\n new_value = eval(replacement,\n {param:orig_value,'lambdacms':lambdaCMS})\n new_param_card['decay'].get(decay).value = new_value\n continue\n if orig_param not in name2block:\n # It can be that some parameter are in the NWA model but not\n # in the CMS, such as the Yukawas for example.\n continue\n for block, lhaid in name2block[orig_param]:\n orig_value = float(new_param_card[block].get(lhaid).value)\n new_value = eval(replacement,\n {param:orig_value,'lambdacms':lambdaCMS})\n new_param_card[block].get(lhaid).value=new_value\n \n if options['tweak']['params']:\n # Apply the tweaked param_card one last time\n evaluator.full_model.set_parameters_and_couplings(\n param_card=new_param_card)\n if NLO:\n new_param_card.write(pjoin(proc_dir,'Cards','param_card.dat'))\n\n # Finally ready to compute the matrix element\n if NLO:\n ME_res = LoopMatrixElementEvaluator.get_me_value(process, 0, \n proc_dir, PSpoint=res['PS_point_used'], verbose=False, \n format='dict', skip_compilation=True)\n # Notice that there is much more information in ME_res. It can\n # be forwarded to check_complex_mass_scheme in this result\n # dictionary if necessary for the analysis. (or even the full \n # dictionary ME_res can be added).\n result['resonances_result'][-1]['born'].append(ME_res['born'])\n result['resonances_result'][-1]['finite'].append(\n ME_res['finite']*ME_res['born']*ME_res['alphaS_over_2pi'])\n else:\n ME_res = evaluator.evaluate_matrix_element(matrix_element,\n p=res['PS_point_used'], auth_skipping=False, output='m2')[0]\n result['resonances_result'][-1]['born'].append(ME_res)\n if not progress_bar is None:\n progress_bar.update(resNumber*len(options['lambdaCMS'])+\\\n (lambdaNumber+1))\n # Flush to force the printout of the progress_bar to be updated\n sys.stdout.flush()\n\n # Restore the original continued log definition if necessary\n log_reversed = False\n for tweak in options['tweak']['custom']:\n if tweak.startswith('log') and had_log_tweaks:\n if log_reversed:\n continue\n if NLO:\n evaluator.apply_log_tweak(proc_dir, 'default')\n evaluator.apply_log_tweak(proc_dir, 'recompile') \n log_reversed = True\n\n # Restore the original model parameters\n evaluator.full_model.set_parameters_and_couplings(param_card=param_card)\n if NLO:\n try:\n shutil.copy(pjoin(proc_dir,'Cards','param_card.dat__backUp__'),\n pjoin(proc_dir,'Cards','param_card.dat'))\n except:\n param_card.write(pjoin(proc_dir,'Cards','param_card.dat'))\n \n # All should have been restored properly, so we can now clean the temporary\n # backups\n try:\n os.remove(pjoin(proc_dir,'Cards','param_card.dat__TemporaryBackup__'))\n os.remove(pjoin(proc_dir,'Source','MODEL',\n 'model_functions.f__TemporaryBackup__'))\n except:\n pass\n\n return (process.nice_string().replace('Process:', '').strip(),result)\n\ndef get_value(process, evaluator, p=None, options=None):\n \"\"\"Return the value/momentum for a phase space point\"\"\"\n \n for i, leg in enumerate(process.get('legs')):\n leg.set('number', i+1)\n\n logger.info(\"Checking %s in %s gauge\" % \\\n ( process.nice_string().replace('Process:', 'process'),\n 'unitary' if aloha.unitary_gauge else 'feynman'))\n\n legs = process.get('legs')\n # Generate a process with these legs\n 
# Generate the amplitude for this process\n    try:\n        if process.get('perturbation_couplings')==[]:\n            amplitude = diagram_generation.Amplitude(process)\n        else:\n            amplitude = loop_diagram_generation.LoopAmplitude(process)\n    except InvalidCmd:\n        logging.info(\"No diagrams for %s\" % \\\n                         process.nice_string().replace('Process', 'process'))\n        return None\n    \n    if not amplitude.get('diagrams'):\n        # This process has no diagrams; go to next process\n        logging.info(\"No diagrams for %s\" % \\\n                         process.nice_string().replace('Process', 'process'))\n        return None\n    \n    if not p:\n        # Generate phase space point to use\n        p, w_rambo = evaluator.get_momenta(process, options)\n    \n    # Generate the HelasMatrixElement for the process\n    if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):\n        matrix_element = helas_objects.HelasMatrixElement(amplitude,\n                                                          gen_color = True)\n    else:\n        matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude, \n           gen_color = True, optimized_output = evaluator.loop_optimized_output)\n\n    mvalue = evaluator.evaluate_matrix_element(matrix_element, p=p,\n                                               output='jamp',options=options)\n    \n    if mvalue and mvalue['m2']:\n        return {'process':process.base_string(),'value':mvalue,'p':p}\n\ndef output_lorentz_inv_loop(comparison_results, output='text'):\n    \"\"\"Present the results of a comparison in a nice list format for loop \n    processes. It details the results from each lorentz transformation performed.\n    \"\"\"\n\n    process = comparison_results[0]['process']\n    results = comparison_results[0]['results']\n    # Rotations do not change the reference vector for helicity projection,\n    # the loop ME are invariant under them with a relatively good accuracy.\n    threshold_rotations = 1e-6\n    # This is typically not the case for the boosts when one cannot really \n    # expect better than 1e-5. 
It turns out that this is even true in \n # quadruple precision, for an unknown reason so far.\n threshold_boosts = 1e-3\n res_str = \"%s\" % process.base_string()\n \n transfo_col_size = 17\n col_size = 18\n transfo_name_header = 'Transformation name'\n\n if len(transfo_name_header) + 1 > transfo_col_size:\n transfo_col_size = len(transfo_name_header) + 1\n \n for transfo_name, value in results:\n if len(transfo_name) + 1 > transfo_col_size:\n transfo_col_size = len(transfo_name) + 1\n \n res_str += '\\n' + fixed_string_length(transfo_name_header, transfo_col_size) + \\\n fixed_string_length(\"Value\", col_size) + \\\n fixed_string_length(\"Relative diff.\", col_size) + \"Result\"\n \n ref_value = results[0]\n res_str += '\\n' + fixed_string_length(ref_value[0], transfo_col_size) + \\\n fixed_string_length(\"%1.10e\" % ref_value[1]['m2'], col_size)\n # Now that the reference value has been recuperated, we can span all the \n # other evaluations\n all_pass = True\n for res in results[1:]:\n threshold = threshold_boosts if 'BOOST' in res[0].upper() else \\\n threshold_rotations\n rel_diff = abs((ref_value[1]['m2']-res[1]['m2'])\\\n /((ref_value[1]['m2']+res[1]['m2'])/2.0))\n this_pass = rel_diff <= threshold\n if not this_pass: \n all_pass = False\n res_str += '\\n' + fixed_string_length(res[0], transfo_col_size) + \\\n fixed_string_length(\"%1.10e\" % res[1]['m2'], col_size) + \\\n fixed_string_length(\"%1.10e\" % rel_diff, col_size) + \\\n (\"Passed\" if this_pass else \"Failed\")\n if all_pass:\n res_str += '\\n' + 'Summary: passed'\n else:\n res_str += '\\n' + 'Summary: failed'\n \n return res_str\n\ndef output_lorentz_inv(comparison_results, output='text'):\n \"\"\"Present the results of a comparison in a nice list format\n if output='fail' return the number of failed process -- for test-- \n \"\"\"\n\n # Special output for loop processes\n if comparison_results[0]['process']['perturbation_couplings']!=[]:\n return output_lorentz_inv_loop(comparison_results, output)\n\n proc_col_size = 17\n\n threshold=1e-10\n process_header = \"Process\"\n\n if len(process_header) + 1 > proc_col_size:\n proc_col_size = len(process_header) + 1\n \n for proc, values in comparison_results:\n if len(proc) + 1 > proc_col_size:\n proc_col_size = len(proc) + 1\n\n col_size = 18\n\n pass_proc = 0\n fail_proc = 0\n no_check_proc = 0\n\n failed_proc_list = []\n no_check_proc_list = []\n\n res_str = fixed_string_length(process_header, proc_col_size) + \\\n fixed_string_length(\"Min element\", col_size) + \\\n fixed_string_length(\"Max element\", col_size) + \\\n fixed_string_length(\"Relative diff.\", col_size) + \\\n \"Result\"\n\n for one_comp in comparison_results:\n proc = one_comp['process'].base_string()\n data = one_comp['results']\n \n if data == 'pass':\n no_check_proc += 1\n no_check_proc_list.append(proc)\n continue\n\n values = [data[i]['m2'] for i in range(len(data))]\n \n min_val = min(values)\n max_val = max(values)\n diff = (max_val - min_val) / abs(max_val) \n \n res_str += '\\n' + fixed_string_length(proc, proc_col_size) + \\\n fixed_string_length(\"%1.10e\" % min_val, col_size) + \\\n fixed_string_length(\"%1.10e\" % max_val, col_size) + \\\n fixed_string_length(\"%1.10e\" % diff, col_size)\n \n if diff < threshold:\n pass_proc += 1\n proc_succeed = True\n res_str += \"Passed\"\n else:\n fail_proc += 1\n proc_succeed = False\n failed_proc_list.append(proc)\n res_str += \"Failed\"\n\n #check all the JAMP\n # loop over jamp\n # Keep in mind that this is not available for loop processes where 
the\n # jamp list is empty\n if len(data[0]['jamp'])!=0:\n for k in range(len(data[0]['jamp'][0])):\n sum = [0] * len(data)\n # loop over helicity\n for j in range(len(data[0]['jamp'])):\n #values for the different lorentz boost\n values = [abs(data[i]['jamp'][j][k])**2 for i in range(len(data))]\n sum = [sum[i] + values[i] for i in range(len(values))]\n \n # Compare the different lorentz boost \n min_val = min(sum)\n max_val = max(sum)\n if not max_val:\n continue\n diff = (max_val - min_val) / max_val \n \n tmp_str = '\\n' + fixed_string_length(' JAMP %s'%k , proc_col_size) + \\\n fixed_string_length(\"%1.10e\" % min_val, col_size) + \\\n fixed_string_length(\"%1.10e\" % max_val, col_size) + \\\n fixed_string_length(\"%1.10e\" % diff, col_size)\n \n if diff > 1e-10:\n if not len(failed_proc_list) or failed_proc_list[-1] != proc:\n fail_proc += 1\n pass_proc -= 1\n failed_proc_list.append(proc)\n res_str += tmp_str + \"Failed\"\n elif not proc_succeed:\n res_str += tmp_str + \"Passed\" \n \n \n \n res_str += \"\\nSummary: %i/%i passed, %i/%i failed\" % \\\n (pass_proc, pass_proc + fail_proc,\n fail_proc, pass_proc + fail_proc)\n\n if fail_proc != 0:\n res_str += \"\\nFailed processes: %s\" % ', '.join(failed_proc_list)\n if no_check_proc:\n res_str += \"\\nNot checked processes: %s\" % ', '.join(no_check_proc_list)\n \n if output == 'text':\n return res_str \n else: \n return fail_proc\n\ndef output_unitary_feynman(comparison_results, output='text'):\n \"\"\"Present the results of a comparison in a nice list format\n if output='fail' return the number of failed process -- for test-- \n \"\"\"\n \n proc_col_size = 17\n \n # We use the first element of the comparison_result list to store the\n # process definition object\n pert_coupl = comparison_results[0]['perturbation_couplings']\n comparison_results = comparison_results[1:]\n \n if pert_coupl:\n process_header = \"Process [virt=\"+\" \".join(pert_coupl)+\"]\"\n else:\n process_header = \"Process\"\n \n if len(process_header) + 1 > proc_col_size:\n proc_col_size = len(process_header) + 1\n \n for data in comparison_results:\n proc = data['process']\n if len(proc) + 1 > proc_col_size:\n proc_col_size = len(proc) + 1\n\n pass_proc = 0\n fail_proc = 0\n no_check_proc = 0\n\n failed_proc_list = []\n no_check_proc_list = []\n\n col_size = 18\n\n res_str = fixed_string_length(process_header, proc_col_size) + \\\n fixed_string_length(\"Unitary\", col_size) + \\\n fixed_string_length(\"Feynman\", col_size) + \\\n fixed_string_length(\"Relative diff.\", col_size) + \\\n \"Result\"\n\n for one_comp in comparison_results:\n proc = one_comp['process']\n data = [one_comp['value_unit'], one_comp['value_feynm']]\n \n \n if data[0] == 'pass':\n no_check_proc += 1\n no_check_proc_list.append(proc)\n continue\n \n values = [data[i]['m2'] for i in range(len(data))]\n \n min_val = min(values)\n max_val = max(values)\n # when max_val is also negative\n # diff will be negative if there is no abs\n diff = (max_val - min_val) / abs(max_val) \n \n res_str += '\\n' + fixed_string_length(proc, proc_col_size) + \\\n fixed_string_length(\"%1.10e\" % values[0], col_size) + \\\n fixed_string_length(\"%1.10e\" % values[1], col_size) + \\\n fixed_string_length(\"%1.10e\" % diff, col_size)\n \n if diff < 1e-8:\n pass_proc += 1\n proc_succeed = True\n res_str += \"Passed\"\n else:\n fail_proc += 1\n proc_succeed = False\n failed_proc_list.append(proc)\n res_str += \"Failed\"\n\n #check all the JAMP\n # loop over jamp\n # This is not available for loop processes where the 
jamp list returned\n # is empty.\n if len(data[0]['jamp'])>0:\n for k in range(len(data[0]['jamp'][0])):\n sum = [0, 0]\n # loop over helicity\n for j in range(len(data[0]['jamp'])):\n #values for the different lorentz boost\n values = [abs(data[i]['jamp'][j][k])**2 for i in range(len(data))]\n sum = [sum[i] + values[i] for i in range(len(values))]\n \n # Compare the different lorentz boost \n min_val = min(sum)\n max_val = max(sum)\n if not max_val:\n continue\n diff = (max_val - min_val) / max_val \n \n tmp_str = '\\n' + fixed_string_length(' JAMP %s'%k , col_size) + \\\n fixed_string_length(\"%1.10e\" % sum[0], col_size) + \\\n fixed_string_length(\"%1.10e\" % sum[1], col_size) + \\\n fixed_string_length(\"%1.10e\" % diff, col_size)\n \n if diff > 1e-10:\n if not len(failed_proc_list) or failed_proc_list[-1] != proc:\n fail_proc += 1\n pass_proc -= 1\n failed_proc_list.append(proc)\n res_str += tmp_str + \"Failed\"\n elif not proc_succeed:\n res_str += tmp_str + \"Passed\" \n \n \n \n res_str += \"\\nSummary: %i/%i passed, %i/%i failed\" % \\\n (pass_proc, pass_proc + fail_proc,\n fail_proc, pass_proc + fail_proc)\n\n if fail_proc != 0:\n res_str += \"\\nFailed processes: %s\" % ', '.join(failed_proc_list)\n if no_check_proc:\n res_str += \"\\nNot checked processes: %s\" % ', '.join(no_check_proc_list)\n \n \n if output == 'text':\n return res_str \n else: \n return fail_proc\n\ndef CMS_save_path(extension, cms_res, used_model, opts, output_path=None):\n \"\"\"Creates a suitable filename for saving these results.\"\"\"\n \n if opts['name']=='auto' and opts['analyze']!='None':\n # Reuse the same name then\n return '%s.%s'%(os.path.splitext(opts['analyze'].split(',')[0])\\\n [0],extension)\n # if a name is specified, use it\n if opts['name']!='auto':\n basename = opts['name']\n else:\n prefix = 'cms_check_'\n # Use process name if there is only one process \n if len(cms_res['ordered_processes'])==1:\n proc = cms_res['ordered_processes'][0]\n replacements = [('=>','gt'),('<=','lt'),('/','_no_'),\n (' ',''),('+','p'),('-','m'),\n ('~','x'), ('>','_'),('=','eq'),('^2','squared')]\n # Remove the perturbation couplings:\n try:\n proc=proc[:proc.index('[')]\n except ValueError:\n pass\n \n for key, value in replacements:\n proc = proc.replace(key,value)\n \n basename =prefix+proc+'_%s_'%used_model.get('name')+\\\n ( ('_'+'_'.join(cms_res['perturbation_orders'])) if \\\n cms_res['perturbation_orders']!=[] else '')\n # Use timestamp otherwise\n else:\n basename = prefix+datetime.datetime.now().strftime(\"%Y_%m_%d_%Hh%Mm%Ss\")\n \n suffix = '_%s'%opts['tweak']['name'] if opts['tweak']['name']!='' else '' \n if output_path: \n return pjoin(output_path,'%s%s.%s'%(basename,suffix,extension))\n else:\n return '%s%s.%s'%(basename,suffix,extension)\n\ndef output_complex_mass_scheme(result,output_path, options, model, output='text'):\n \"\"\" Outputs nicely the outcome of the complex mass scheme check performed\n by varying the width in the offshell region of resonances found for eahc process.\n Output just specifies whether text should be returned or a list of failed\n processes. Use 'concise_text' for a consise report of the results.\"\"\"\n \n pert_orders=result['perturbation_orders']\n \n ######## CHECK PARAMETERS #########\n #\n # diff_lambda_power choses the power by which one should divide the difference\n # curve. The test should only work with 1, but it is useful for the LO\n # check to see the difference has O(\\lambda) contribution by setting this\n # parameter to 2. 
If the Born does not have O(\\lambda) contributions\n    # (i.e. if the test still passes with diff_lambda_power=2) then the NLO test\n    # will not be sensitive to the CMS implementation details.\n    diff_lambda_power = options['diff_lambda_power']\n    # DISCLAIMER:\n    # The CMS check is non-trivial to automate and it is actually best done\n    # manually by looking at plots for various implementations of the CMS.\n    # The automatic check performed here with the default parameters below\n    # should typically capture the main features of the CMS implementation.\n    # There will always be exceptions however.\n    #\n    if 'has_FRdecay' in result:\n        has_FRdecay = result['has_FRdecay']\n    else:\n        has_FRdecay = False\n    # be tighter at LO\n    if not pert_orders:\n        CMS_test_threshold = 1e-3\n    else:\n        # At NLO, a correct cancellation is typically of the order of 2% with\n        # a lowest lambda value of 10^-4. It is clear that the threshold should\n        # scale with the minimum lambda value because any little offset in the\n        # LO width value for example (acceptable when less than 1% if the\n        # widths were computed numerically) will lead to an inaccuracy of the \n        # cancellation scaling with lambda. \n        if not has_FRdecay and ('recomputed_with' not in result or \\\n              result['recompute_width'] in ['always','first_time']):\n            CMS_test_threshold = 2e-2*(1.0e-4/min(result['lambdaCMS']))\n        else:\n            # If the widths were not computed numerically, then the accuracy of\n            # the cancellation should be better.\n            CMS_test_threshold = 2e-2*(1.0e-5/min(result['lambdaCMS']))\n    \n    # This threshold sets how flat the diff line must be when approaching it from\n    # the right to start considering its value. Notice that it cannot be larger\n    # than the CMS_test_threshold\n    consideration_threshold = min(CMS_test_threshold/10.0, 0.05)\n    # Number of values grouped with the median technique to avoid being\n    # sensitive to instabilities\n    group_val = 3\n    # Starting from which value, relative to the averaged diff, should one consider\n    # the asymptotic diff median to be exactly 0.0 in which case one would use this\n    # average instead of this asymptotic median. u d~ > e+ ve LO exhibits a \\\n    # difference at zero for example.\n    diff_zero_threshold = 1e-3\n    # Plotting parameters. Specify the lambda range to plot. 
\n # lambda_range = [-1,-1] returns the default automatic setup\n lambda_range = options['lambda_plot_range']\n ##################################\n \n# One can print out the raw results by uncommenting the line below\n# misc.sprint(result)\n# for i, res in enumerate(result['a e- > e- ve ve~ [ virt = QCD QED ]']['CMS']):\n# for i, res in enumerate(result['u d~ > e+ ve a [ virt = QCD QED ]']['CMS']):\n# if res['resonance']['FSMothersNumbers'] == set([3, 4]):\n# misc.sprint(res['resonance']['PS_point_used'])\n# stop\n \n res_str = ''\n # Variables for the concise report\n concise_str = ''\n concise_data = '%%(process)-%ds%%(asymptot)-15s%%(cms_check)-25s%%(status)-25s\\n'\n concise_repl_dict = {'Header':{'process':'Process',\n 'asymptot':'Asymptot',\n 'cms_check':'Deviation to asymptot',\n 'status':'Result'}}\n \n ####### BEGIN helper functions\n\n # Chose here whether to use Latex particle names or not\n # Possible values are 'none', 'model' or 'built-in'\n useLatexParticleName = 'built-in'\n name2tex = {'e+':r'e^+','w+':r'W^+','a':r'\\gamma','g':'g',\n 'e-':r'e^-','w-':r'W^-','z':'Z','h':'H',\n 'mu+':r'\\mu^+',\n 'mu-':r'\\mu^-',\n 'ta+':r'\\tau^+',\n 'ta-':r'\\tau^-'}\n for p in ['e','m','t']:\n d = {'e':'e','m':r'\\mu','t':r'\\tau'}\n name2tex['v%s'%p]=r'\\nu_{%s}'%d[p]\n name2tex['v%s~'%p]=r'\\bar{\\nu_{%s}}'%d[p]\n \n for p in ['u','d','c','s','b','t']:\n name2tex[p]=p\n name2tex['%s~'%p]=r'\\bar{%s}'%p\n \n def format_particle_name(particle, latex=useLatexParticleName):\n p_name = particle\n if latex=='model':\n try:\n texname = model.get_particle(particle).get('texname')\n if texname and texname!='none':\n p_name = r'$\\displaystyle %s$'%texname\n except:\n pass\n elif latex=='built-in':\n try:\n p_name = r'$\\displaystyle %s$'%name2tex[particle]\n except:\n pass\n return p_name\n\n def resonance_str(resonance, latex=useLatexParticleName):\n \"\"\" Provides a concise string to characterize the resonance \"\"\"\n particle_name = model.get_particle(resonance['ParticlePDG']).get_name()\n mothersID=['%d'%n for n in sorted(resonance['FSMothersNumbers'])]\n return r\"%s [%s]\"%(format_particle_name(particle_name,latex=latex),\n ','.join(mothersID))\n\n def format_title(process, resonance):\n \"\"\" Format the plot title given the process and resonance \"\"\"\n \n process_string = []\n for particle in process.split():\n if '<=' in particle:\n particle = particle.replace('<=',r'$\\displaystyle <=$')\n if '^2' in particle:\n particle = particle.replace('^2',r'$\\displaystyle ^2$')\n if particle=='$$':\n process_string.append(r'\\$\\$')\n continue\n if particle=='>':\n process_string.append(r'$\\displaystyle \\rightarrow$')\n continue\n if particle=='/':\n process_string.append(r'$\\displaystyle /$')\n continue\n process_string.append(format_particle_name(particle)) \n\n if resonance=='':\n return r'CMS check for %s' %(' '.join(process_string))\n else:\n return r'CMS check for %s ( resonance %s )'\\\n %(' '.join(process_string),resonance)\n\n def guess_lambdaorder(ME_values_list, lambda_values, expected=None,\n proc=None, res=None):\n \"\"\" Guess the lambda scaling from a list of ME values and return it.\n Also compare with the expected result if specified and trigger a \n warning if not in agreement.\"\"\"\n # guess the lambdaCMS power in the amplitude squared\n bpowers = []\n for i, lambdaCMS in enumerate(lambda_values[1:]):\n bpowers.append(round(math.log(ME_values_list[0]/ME_values_list[i+1],\\\n lambda_values[0]/lambdaCMS)))\n\n # Pick the most representative power\n bpower = 
sorted([(el, bpowers.count(el)) for el in set(bpowers)],\n                         key = lambda elem: elem[1], reverse=True)[0][0]\n        if not expected:\n            return bpower\n        if bpower != expected:\n            logger.warning('The apparent scaling of the squared amplitude '+\n              'seems inconsistent w.r.t. the detected value '+\n              '(%i vs %i). %i will be used.'%(expected,bpower,bpower)+\n              ' This happened for process %s and resonance %s'%(proc, res))\n        return bpower \n\n    def check_stability(ME_values, lambda_values, lambda_scaling, values_name):\n        \"\"\" Checks if the values passed in argument are stable and returns the \n        stability check outcome warning if it is not precise enough. \"\"\"\n\n        values = sorted([\n            abs(val*(lambda_values[0]/lambda_values[i])**lambda_scaling) for \\\n            i, val in enumerate(ME_values)])\n        median = values[len(values)//2]\n        max_diff = max(abs(values[0]-median),abs(values[-1]-median))\n        stability = max_diff/median\n        stab_threshold = 1e-2\n        if stability >= stab_threshold:\n            return \"== WARNING: Stability check failed for '%s' with stability %.2e.\\n\"\\\n                %(values_name, stability)\n        else:\n            return None\n    ####### END helper functions\n    if options['analyze']=='None':\n        if options['reuse']:\n            save_path = CMS_save_path('pkl', result, model, options, \n                                                    output_path=output_path)\n            buff = \"\\nThe results of this check have been stored on disk and its \"+\\\n              \"analysis can be rerun at any time with the MG5aMC command:\\n \"+\\\n              \"   check cms --analyze=%s\\n\"%save_path\n            res_str += buff\n            concise_str += buff\n            save_load_object.save_to_file(save_path, result)\n        elif len(result['ordered_processes'])>0:\n            buff = \"\\nUse the following syntax if you want to store \"+\\\n              \"the raw results on disk.\\n\"+\\\n              \"   check cms -reuse <proc_def> <options>\\n\"\n            res_str += buff\n            concise_str += buff \n    \n    ############################\n    # Numerical check first    #\n    ############################\n    \n    checks = []\n    for process in result['ordered_processes']:\n        checks.extend([(process,resID) for resID in \\\n                                         range(len(result[process]['CMS']))])\n    \n    if options['reuse']:\n        logFile = open(CMS_save_path(\n               'log', result, model, options, output_path=output_path),'w')\n    \n    lambdaCMS_list=result['lambdaCMS']\n    \n    # List of failed processes\n    failed_procs = []\n\n    # A bar printing function helper. 
Change the length here for esthetics\n bar = lambda char: char*47\n\n # Write out the widths used if information is present:\n if 'widths_computed' in result:\n res_str += '\\n%s%s%s\\n'%(bar('='),' Widths ',bar('='))\n if result['recompute_width'] == 'never':\n res_str += '| Widths extracted from the param_card.dat'\n else:\n res_str += '| Widths computed %s'%('analytically' if has_FRdecay \n else 'numerically')\n if result['recompute_width'] == 'first_time':\n res_str += ' for \\lambda = 1'\n elif result['recompute_width'] == 'always':\n res_str += ' for all \\lambda values'\n res_str += \" using mode '--recompute_width=%s'.\\n\"%result['recompute_width']\n for particle_name, width in result['widths_computed']:\n res_str += '| %-10s = %-11.6gGeV\\n'%('Width(%s)'%particle_name,width)\n res_str += '%s%s%s\\n'%(bar('='),'='*8,bar('='))\n\n # Doing the analysis to printout to the MG5 interface and determine whether\n # the test is passed or not\n # Number of last points to consider for the stability test\n nstab_points=group_val\n # Store here the asymptot detected for each difference curve\n differences_target = {}\n for process, resID in checks:\n # Reinitialize the concise result replacement dictionary \n # (only one resonance is indicated in this one, no matter what.)\n concise_repl_dict[process] = {'process':process,\n 'asymptot':'N/A',\n 'cms_check':'N/A',\n 'status':'N/A'}\n proc_res = result[process]\n cms_res = proc_res['CMS'][resID]\n nwa_res = proc_res['NWA'][resID]\n resonance = resonance_str(cms_res['resonance'], latex='none')\n cms_born=cms_res['born']\n nwa_born=nwa_res['born']\n # Starting top thick bar\n res_str += '\\n%s%s%s\\n'%(bar('='),'='*8,bar('='))\n # Centered process and resonance title\n proc_title = \"%s (resonance %s)\"%(process,resonance)\n centering = (bar(2)+8-len(proc_title))//2\n res_str += \"%s%s\\n\"%(' '*centering,proc_title)\n # Starting bottom thin bar\n res_str += '%s%s%s\\n'%(bar('-'),'-'*8,bar('-'))\n # Reminder if diff_lambda_power is not 1\n \n if diff_lambda_power!=1:\n res_str += \"== WARNING diff_lambda_power is not 1 but = %g\\n\"%diff_lambda_power\n res_str += '%s%s%s\\n'%(bar('-'),'-'*8,bar('-'))\n\n born_power = guess_lambdaorder(nwa_born,lambdaCMS_list,\n expected=proc_res['born_order'], proc=process, res=resonance)\n stab_cms_born = check_stability(cms_born[-nstab_points:], \n lambdaCMS_list[-nstab_points:], born_power, 'CMS Born')\n if stab_cms_born:\n res_str += stab_cms_born\n stab_nwa_born = check_stability(nwa_born[-nstab_points:], \n lambdaCMS_list[-nstab_points:], born_power, 'NWA Born')\n if stab_nwa_born:\n res_str += stab_nwa_born\n # Write out the phase-space point\n res_str += \"== Kinematic configuration in GeV (E,px,pypz)\\n\"\n for i, p in enumerate(cms_res['resonance']['PS_point_used']):\n res_str += \" | p%-2.d = \"%(i+1)\n for pi in p:\n res_str += '%-24.17g'%pi if pi<0.0 else ' %-23.17g'%pi \n res_str += \"\\n\"\n # Write out the offshellnesses specification\n res_str += \"== Offshellnesses of all detected resonances\\n\"\n for res_name, offshellness in cms_res['resonance']['offshellnesses']:\n res_str += \" | %-15s = %f\\n\"%(res_name, offshellness)\n res_str += '%s%s%s\\n'%(bar('-'),'-'*8,bar('-'))\n\n if not pert_orders:\n res_str += \"== Born scaling lambda^n_born. 
nborn = %d\\n\"%born_power\n else:\n cms_finite=cms_res['finite']\n nwa_finite=nwa_res['finite']\n loop_power = guess_lambdaorder(nwa_finite,lambdaCMS_list,\n expected=proc_res['loop_order'], proc=process, res=resonance) \n res_str += \"== Scaling lambda^n. nborn, nloop = %d, %d\\n\"\\\n %(born_power,loop_power)\n stab_cms_finite = check_stability(cms_finite[-nstab_points:], \n lambdaCMS_list[-nstab_points:], loop_power, 'CMS finite')\n if stab_cms_finite:\n res_str += stab_cms_finite\n stab_nwa_finite = check_stability(nwa_finite[-nstab_points:], \n lambdaCMS_list[-nstab_points:], loop_power, 'NWA finite')\n if stab_nwa_finite:\n res_str += stab_nwa_finite\n # Now organize data\n CMSData = []\n NWAData = []\n DiffData = []\n for idata, lam in enumerate(lambdaCMS_list):\n if not pert_orders:\n new_cms=cms_born[idata]/(lam**born_power)\n new_nwa=nwa_born[idata]/(lam**born_power)\n else:\n new_cms=(cms_finite[idata]+cms_born[idata]-nwa_born[idata])/(lam*nwa_born[idata])\n new_nwa=nwa_finite[idata]/(lam*nwa_born[idata])\n new_diff=(new_cms-new_nwa)/(lam**diff_lambda_power)\n CMSData.append(new_cms)\n NWAData.append(new_nwa)\n DiffData.append(new_diff)\n\n \n # NWA Born median\n\n # Find which values to start the test at by looking at the CMSdata scaling\n # First compute the median of the middle 60% of entries in the plot\n trim_range=int(((1.0-0.6)/2.0)*len(DiffData))\n low_diff_median = sorted(DiffData[trim_range:-trim_range])\\\n [(len(DiffData)-2*trim_range)//2]\n \n # Now walk the values from the right of the diff plot until we reaches\n # values stable with respect to the CMS_tale_median. This value will\n # be limit of the range considered for the CMS test. Do it in a way which\n # is insensitive to instabilities, by considering medians of group_val \n # consecutive points.\n current_median = 0\n # We really want to select only the very stable region\n scan_index = 0\n reference = abs(sorted(NWAData)[len(NWAData)//2])\n if low_diff_median!= 0.0:\n if abs(reference/low_diff_median)<diff_zero_threshold:\n reference = abs(low_diff_median)\n while True:\n scanner = DiffData[scan_index:group_val+scan_index]\n current_median = sorted(scanner)[len(scanner)//2]\n # Useful for debugging\n #misc.sprint(scanner,current_median,abs(current_median-low_diff_median)/reference,reference,consideration_threshold)\n if abs(current_median-low_diff_median)/reference<\\\n consideration_threshold:\n break;\n scan_index += 1\n if (group_val+scan_index)>=len(DiffData):\n # this should not happen, but in this case we arbitrarily take\n # half of the data\n logger.warning('The median scanning failed during the CMS check '+\n 'for process %s'%proc_title+\\\n 'This is means that the difference plot has not stable'+\\\n 'intermediate region and MG5_aMC will arbitrarily consider the'+\\\n 'left half of the values.')\n scan_index = -1\n break;\n \n if scan_index == -1:\n cms_check_data_range = len(DiffData)//2\n else:\n cms_check_data_range = scan_index + group_val\n\n res_str += \"== Data range considered (min, max, n_val) = (%.1e, %.1e, %d)\\n\"\\\n %(lambdaCMS_list[-1],lambdaCMS_list[scan_index],\n len(lambdaCMS_list)-scan_index)\n # Now setup the list of values affecting the CMScheck\n CMScheck_values = DiffData[cms_check_data_range:]\n\n # For the purpose of checking the stability of the tale, we now do\n # the consideration_threshold scan from the *left* and if we finsih\n # before the end, it means that there is an unstable region.\n if scan_index >= 0: \n # try to find the numerical instability region\n 
scan_index = len(CMScheck_values)\n used_group_val = max(3,group_val)\n unstability_found = True\n while True:\n scanner = CMScheck_values[scan_index-used_group_val:scan_index]\n maxdiff = max(abs(scan-low_diff_median) for scan in scanner)\n if maxdiff/reference<consideration_threshold:\n break;\n if (scan_index-used_group_val)==0:\n # this only happens when no stable intermediate region can be found\n # Set scan_index to -99 so as to prevent warning\n unstability_found = False\n break;\n # Proceed to th next block of data\n scan_index -= 1\n\n # Now report here the unstability found\n if unstability_found:\n unstab_check=CMScheck_values[scan_index:]\n relative_array = [val > CMScheck_values[scan_index-1] for \n val in unstab_check]\n upper = relative_array.count(True)\n lower = relative_array.count(False)\n if not ((lower==0 and upper>=0) or (lower>=0 and upper==0)):\n logger.warning(\n\"\"\"For process %s, a numerically unstable region was detected starting from lambda < %.1e.\nLook at the plot in this region (and possibly throw more points using the option --lambdaCMS).\nIf this is indeed a stability issue, then either decrease MLStabThreshold in MadLoop or decrease the\nminimum value of lambda to be considered in the CMS check.\"\"\"\\\n %(proc_title, lambdaCMS_list[cms_check_data_range+scan_index-1]))\n \n # Now apply the same same technique, as above but to the difference plot\n # Now we will use low_diff_median instead of diff_tale_median\n #diff_tale_median = sorted(CMScheck_values)[len(CMScheck_values)//2]\n scan_index = 0\n max_diff = 0.0\n res_str += \"== Ref. value used in the ratios (Born NWA) = %s\\n\"\\\n %('%.3g'%reference)\n res_str += \"== Asymptotic difference value detected = %s\\n\"\\\n %('%.3g'%low_diff_median)\n concise_repl_dict[process]['asymptot'] = '%.3e'%low_diff_median\n\n # Pass information to the plotter for the difference target\n differences_target[(process,resID)]= low_diff_median\n# misc.sprint('Now doing resonance %s.'%res_str)\n while True:\n current_vals = CMScheck_values[scan_index:scan_index+group_val] \n max_diff = max(max_diff, abs(low_diff_median-\n sorted(current_vals)[len(current_vals)//2])/reference)\n if (scan_index+group_val)>=len(CMScheck_values):\n break\n scan_index += 1\n \n # Now use the CMS check result\n cms_check = (max_diff*100.0, '>' if max_diff>CMS_test_threshold else '<',\n CMS_test_threshold*100.0) \n res_str += \"== CMS check result (threshold) = %.3g%% (%s%.3g%%)\\n\"%cms_check\n concise_repl_dict[process]['cms_check'] = \\\n \"%-10s (%s%.3g%%)\"%('%.3g%%'%cms_check[0],cms_check[1],cms_check[2])\n\n if max_diff>CMS_test_threshold:\n failed_procs.append((process,resonance))\n res_str += \"%s %s %s\\n\"%(bar('='),\n 'FAILED' if max_diff>CMS_test_threshold else 'PASSED',bar('='))\n concise_repl_dict[process]['status'] = 'Failed' if max_diff>CMS_test_threshold \\\n else 'Passed'\n\n if output=='concise_text':\n # Find what is the maximum size taken by the process string\n max_proc_size = max(\n [len(process) for process in result['ordered_processes']]+[10])\n # Re-initialize the res_str so as to contain only the minimal report\n res_str = concise_str\n res_str += '\\n'+concise_data%(max_proc_size+4)%concise_repl_dict['Header']\n for process in result['ordered_processes']:\n res_str += (concise_data%(max_proc_size+4)%concise_repl_dict[process])\n\n if len(checks):\n res_str += \"Summary: %i/%i passed\"%(len(checks)-len(failed_procs),len(checks))+\\\n ('.\\n' if not failed_procs else ', failed checks are for:\\n')\n else:\n return 
\"\\nNo CMS check to perform, the process either has no diagram or does not \"+\\\n \"not feature any massive s-channel resonance.\"\n \n for process, resonance in failed_procs:\n res_str += \"> %s, %s\\n\"%(process, resonance)\n\n if output=='concise_text':\n res_str += '\\nMore detailed information on this check available with the command:\\n'\n res_str += ' MG5_aMC>display checks\\n'\n\n ############################\n # Now we turn to the plots #\n ############################\n if not options['show_plot']:\n if options['reuse']:\n logFile.write(res_str)\n logFile.close()\n if output.endswith('text'):\n return res_str\n else:\n return failed_procs\n \n fig_output_file = CMS_save_path('pdf', result, model, options, \n output_path=output_path)\n base_fig_name = fig_output_file[:-4]\n suffix = 1\n while os.path.isfile(fig_output_file):\n fig_output_file = '%s__%d__.pdf'%(base_fig_name,suffix)\n suffix+=1\n\n process_data_plot_dict={}\n \n # load possible additional results. The second element of the tuple is\n # the dataset name.\n all_res = [(result, None)]\n for i, add_res in enumerate(options['analyze'].split(',')[1:]):\n specs =re.match(r'^(?P<filename>.*)\\((?P<title>.*)\\)$', add_res)\n if specs:\n filename = specs.group('filename')\n title = specs.group('title')\n else:\n filename = add_res\n title = '#%d'%(i+1)\n\n new_result = save_load_object.load_from_file(filename)\n if new_result is None:\n raise InvalidCmd('The complex mass scheme check result'+\n \" file below could not be read.\\n %s\"%filename)\n if len(new_result['ordered_processes'])!=len(result['ordered_processes']) \\\n or len(new_result['lambdaCMS'])!=len(result['lambdaCMS']):\n raise self.InvalidCmd('The complex mass scheme check result'+\n \" file below does not seem compatible.\\n %s\"%filename)\n all_res.append((new_result,title))\n \n # Prepare the data\n for process, resID in checks:\n data1=[] # for subplot 1,i.e. CMS and NWA\n data2=[] # for subplot 2,i.e. diff\n info ={} # info to be passed to the plotter\n for res in all_res:\n proc_res = res[0][process]\n cms_res = proc_res['CMS'][resID]\n nwa_res = proc_res['NWA'][resID]\n resonance = resonance_str(cms_res['resonance'])\n if options['resonances']!=1:\n info['title'] = format_title(process, resonance)\n else:\n info['title'] = format_title(process, '')\n # Born result\n cms_born=cms_res['born']\n nwa_born=nwa_res['born']\n if len(cms_born) != len(lambdaCMS_list) or\\\n len(nwa_born) != len(lambdaCMS_list):\n raise MadGraph5Error('Inconsistent list of results w.r.t. the'+\\\n ' lambdaCMS values specified for process %s'%process)\n if pert_orders:\n cms_finite=cms_res['finite'] \n nwa_finite=nwa_res['finite']\n if len(cms_finite) != len(lambdaCMS_list) or\\\n len(nwa_finite) != len(lambdaCMS_list):\n raise MadGraph5Error('Inconsistent list of results w.r.t. 
the'+\\\n ' lambdaCMS values specified for process %s'%process)\n \n bpower = guess_lambdaorder(nwa_born,lambdaCMS_list,\n expected=proc_res['born_order'], proc=process, res=resonance)\n\n CMSData = []\n NWAData = []\n DiffData = [] \n for idata, lam in enumerate(lambdaCMS_list):\n if not pert_orders:\n new_cms = cms_born[idata]/lam**bpower\n new_nwa = nwa_born[idata]/lam**bpower\n else:\n new_cms=cms_finite[idata]+cms_born[idata]-nwa_born[idata]\n new_nwa=nwa_finite[idata]\n new_cms /= lam*nwa_born[idata]\n new_nwa /= lam*nwa_born[idata]\n new_diff=(new_cms-new_nwa)/(lam**diff_lambda_power)\n CMSData.append(new_cms)\n NWAData.append(new_nwa)\n DiffData.append(new_diff)\n if res[1] is None:\n if not pert_orders:\n data1.append([r'$\\displaystyle CMS\\;=\\;\\mathcal{M}_{CMS}^{(0)}/\\lambda^%d$'%bpower,CMSData])\n data1.append([r'$\\displaystyle NWA\\;=\\;\\mathcal{M}_{NWA}^{(0)}/\\lambda^%d$'%bpower,NWAData])\n else:\n data1.append([r'$\\displaystyle CMS\\;=\\;(\\mathcal{M}^{(1)}_{CMS}+\\mathcal{M}_{CMS}^{(0)}-\\mathcal{M}^{(0)}_{NWA})/(\\lambda\\cdot\\mathcal{M}^{(0)}_{NWA})$',CMSData])\n data1.append([r'$\\displaystyle NWA\\;=\\;\\mathcal{M}^{(1)}_{NWA}/(\\lambda\\cdot\\mathcal{M}^{(0)}_{NWA})$',NWAData])\n data2.append([r'$\\displaystyle\\Delta\\;=\\;(CMS-NWA)/\\lambda%s$'\\\n %('' if diff_lambda_power==1 else r'^{%g}'%diff_lambda_power)\n ,DiffData])\n data2.append([r'Detected asymptot',[differences_target[(process,resID)] \n for i in range(len(lambdaCMS_list))]])\n else:\n data1.append([r'$\\displaystyle CMS$ %s'%res[1].replace('_',' ').replace('#','\\#'), CMSData])\n data1.append([r'$\\displaystyle NWA$ %s'%res[1].replace('_',' ').replace('#','\\#'), NWAData])\n data2.append([r'$\\displaystyle\\Delta$ %s'%res[1].replace('_',' ').replace('#','\\#'), DiffData])\n \n process_data_plot_dict[(process,resID)]=(data1,data2, info)\n\n # Now turn to the actual plotting\n try:\n import matplotlib.pyplot as plt\n from matplotlib.backends.backend_pdf import PdfPages\n logger.info('Rendering plots... (this can take some time because of the latex labels)')\n\n res_str += \\\n\"\"\"\\n-----------------------------------------------------------------------------------------------\n| In the plots, the Complex Mass Scheme check is successful if the normalized difference |\n| between the CMS and NWA result (lower inset) tends to a constant when \\lambda goes to zero. 
|\n-----------------------------------------------------------------------------------------------\\n\"\"\"\n\n # output the figures\n if lambda_range[1]>0:\n min_lambda_index = -1\n for i, lam in enumerate(lambdaCMS_list):\n if lam<=lambda_range[1]:\n min_lambda_index = i\n break\n else:\n min_lambda_index = 0\n if lambda_range[0]>0:\n max_lambda_index = -1\n for i, lam in enumerate(lambdaCMS_list):\n if lam<=lambda_range[0]:\n max_lambda_index=i-1\n break\n else:\n max_lambda_index=len(lambdaCMS_list)-1\n \n if max_lambda_index==-1 or min_lambda_index==-1 or \\\n min_lambda_index==max_lambda_index:\n raise InvalidCmd('Invalid lambda plotting range: (%.1e,%.1e)'%\\\n (lambda_range[0],lambda_range[1]))\n # Trim lambda values\n if lambda_range[0]>0.0 or lambda_range[1]>0.0:\n lambdaCMS_list = lambdaCMS_list[min_lambda_index:max_lambda_index+1]\n\n plt.rc('text', usetex=True)\n plt.rc('font', family='serif')\n pp=PdfPages(fig_output_file)\n if len(checks)==0 or len(process_data_plot_dict[checks[0]][1])<=7:\n colorlist=['b','r','g','k','c','m','y']\n else:\n import matplotlib.colors as colors\n import matplotlib.cm as mplcm\n import matplotlib.colors as colors\n \n # Nice color maps here are 'gist_rainbow'\n cm = plt.get_cmap('gist_rainbow')\n cNorm = colors.Normalize(vmin=0, vmax=(len(data2)-1))\n scalarMap = mplcm.ScalarMappable(norm=cNorm, cmap=cm)\n # use vmax=(len(data1)-1)*0.9 to remove pink at the end of the spectrum\n colorlist = [scalarMap.to_rgba(i*0.9) for i in range(len(data2))]\n # Or it is also possible to alternate colors so as to make them \n # as distant as possible to one another\n # colorlist = sum([\n # [scalarMap.to_rgba(i),scalarMap.to_rgba(i+len(data2)//2)]\n # for i in range(len(data2)//2)],[])\n\n legend_size = 10\n for iproc, (process, resID) in enumerate(checks):\n data1,data2, info=process_data_plot_dict[(process,resID)]\n # Trim dataplot if necessary\n if lambda_range[0]>0.0 or lambda_range[1]>0.0:\n for i in range(len(data1)):\n data1[i][1]=data1[i][1][min_lambda_index:max_lambda_index+1]\n for i in range(len(data2)):\n data2[i][1]=data2[i][1][min_lambda_index:max_lambda_index+1]\n plt.figure(iproc+1)\n plt.subplot(211)\n minvalue=1e+99\n maxvalue=-1e+99\n for i, d1 in enumerate(data1):\n # Use the same color for NWA and CMS curve but different linestyle\n color=colorlist[i//2]\n data_plot=d1[1]\n minvalue=min(min(data_plot),minvalue)\n maxvalue=max(max(data_plot),maxvalue) \n plt.plot(lambdaCMS_list, data_plot, color=color, marker='', \\\n linestyle=('-' if i%2==0 else '--'), \n label=(d1[0] if (i%2==0 or i==1) else '_nolegend_'))\n ymin = minvalue-(maxvalue-minvalue)/5.\n ymax = maxvalue+(maxvalue-minvalue)/5.\n\n plt.yscale('linear')\n plt.xscale('log')\n plt.title(info['title'],fontsize=12,y=1.08)\n plt.ylabel(r'$\\displaystyle \\mathcal{M}$')\n #plt.xlabel('lambdaCMS')\n if ymax*len(data1)-sum(max(d1[1][-len(d1[1])//2:]) \\\n for d1 in data1) > 0.5*(ymax-ymin)*len(data1):\n plt.legend(prop={'size':legend_size},loc='upper left', frameon=False)\n else:\n plt.legend(prop={'size':legend_size},loc='lower left', frameon=False)\n \n plt.axis([min(lambdaCMS_list),max(lambdaCMS_list), ymin, ymax])\n \n plt.subplot(212)\n minvalue=1e+99\n maxvalue=-1e+99\n \n try:\n asymptot_index = [d2[0] for d2 in data2].index('Detected asymptot')\n plt.plot(lambdaCMS_list, data2[asymptot_index][1], \n color='0.75', marker='', linestyle='-', label='')\n except ValueError:\n pass\n \n color_ID = -1\n for d2 in data2:\n # Special setup for the reference asymptot straight line\n if 
d2[0]=='Detected asymptot':\n continue\n color_ID += 1\n color=colorlist[color_ID]\n data_plot=d2[1]\n minvalue=min(min(data_plot),minvalue)\n maxvalue=max(max(data_plot),maxvalue)\n plt.plot(lambdaCMS_list, data_plot, color=color, marker='',\\\n linestyle='-', label=d2[0])\n ymin = minvalue-(maxvalue-minvalue)/5.\n ymax = maxvalue+(maxvalue-minvalue)/5.\n\n plt.yscale('linear')\n plt.xscale('log')\n plt.ylabel(r'$\\displaystyle \\Delta$')\n plt.xlabel(r'$\\displaystyle \\lambda$')\n # The unreadable stuff below is just to check if the left of the \n # plot is stable or not\n sd = [sorted(d2[1][-len(d2[1])//2:]) for d2 in data2]\n left_stability = sum(abs(s[0]-s[-1]) for s in sd)\n sd = [sorted(d2[1][:-len(d2[1])//2]) for d2 in data2]\n right_stability = sum(abs(s[0]-s[-1]) for s in sd)\n left_stable = False if right_stability==0.0 else \\\n (left_stability/right_stability)<0.1\n \n if left_stable:\n if ymax*len(data2)-sum(max(d2[1][-len(d2[1])//2:]) \\\n for d2 in data2) > 0.5*(ymax-ymin)*len(data2):\n plt.legend(prop={'size':legend_size},loc='upper left', frameon=False)\n else:\n plt.legend(prop={'size':legend_size},loc='lower left', frameon=False) \n else:\n if ymax*len(data2)-sum(max(d2[1][:-len(d2[1])//2]) \\\n for d2 in data2) > 0.5*(ymax-ymin)*len(data2):\n plt.legend(prop={'size':legend_size},loc='upper right', frameon=False)\n else:\n plt.legend(prop={'size':legend_size},loc='lower right', frameon=False)\n\n plt.axis([min(lambdaCMS_list),max(lambdaCMS_list),\\\n minvalue-(maxvalue-minvalue)/5., maxvalue+(maxvalue-minvalue)/5.])\n \n plt.savefig(pp,format='pdf')\n\n pp.close()\n \n if len(checks)>0:\n logger.info('Complex Mass Scheme check plot output to file %s. '%fig_output_file)\n \n if sys.platform.startswith('linux'):\n misc.call([\"xdg-open\", fig_output_file])\n elif sys.platform.startswith('darwin'):\n misc.call([\"open\", fig_output_file])\n \n plt.close(\"all\")\n \n except Exception as e:\n if isinstance(e, ImportError):\n res_str += \"\\n= Install matplotlib to get a \"+\\\n \"graphical display of the results of the cms check.\"\n else:\n general_error = \"\\n= Could not produce the cms check plot because of \"+\\\n \"the following error: %s\"%str(e)\n try:\n import six.moves.tkinter\n if isinstance(e, six.moves.tkinter.TclError):\n res_str += \"\\n= Plots are not generated because your system\"+\\\n \" does not support graphical display.\"\n else:\n res_str += general_error\n except:\n res_str += general_error\n \n if options['reuse']:\n logFile.write(res_str)\n logFile.close()\n\n if output.endswith('text'):\n return res_str\n else:\n return failed_procs\n" ]
[ [ "matplotlib.cm.ScalarMappable", "matplotlib.pyplot.xscale", "matplotlib.pyplot.xlabel", "matplotlib.backends.backend_pdf.PdfPages", "matplotlib.pyplot.plot", "matplotlib.pyplot.legend", "matplotlib.pyplot.close", "matplotlib.pyplot.savefig", "matplotlib.pyplot.rc", "matplotlib.pyplot.get_cmap", "matplotlib.pyplot.figure", "matplotlib.pyplot.title", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.show", "matplotlib.pyplot.yscale", "matplotlib.pyplot.subplot" ] ]
sugartom/tf-pose-estimation
[ "84d3d87bd6c5a757e3106245440f1582b9ae0d69" ]
[ "run_test_export.py" ]
[ "import time\n\nimport sys\nsys.path.append('/home/yitao/Documents/fun-project/tensorflow-related/tf-pose-estimation/')\nfrom tf_pose import common\nimport cv2\nimport numpy as np\nfrom tf_pose.estimator import TfPoseEstimator\nfrom tf_pose.networks import get_graph_path, model_wh\n\n# Yitao-TLS-Begin\nimport tensorflow as tf\nimport os\nimport sys\nfrom tensorflow.python.saved_model import builder as saved_model_builder\nfrom tensorflow.python.saved_model import signature_constants\nfrom tensorflow.python.saved_model import signature_def_utils\nfrom tensorflow.python.saved_model import tag_constants\nfrom tensorflow.python.saved_model import utils\nfrom tensorflow.python.util import compat\n\ntf.app.flags.DEFINE_integer('model_version', 1, 'version number of the model.')\nFLAGS = tf.app.flags.FLAGS\n# Yitao-TLS-End\n\nif __name__ == '__main__':\n\n image_path = '/home/yitao/Documents/fun-project/tensorflow-related/tf-pose-estimation/images/p1.jpg'\n resize_out_ratio = 4.0\n # model_name = 'cmu'\n model_name = \"mobilenet_thin\"\n\n e = TfPoseEstimator(get_graph_path(model_name), target_size=(432, 368))\n \n iteration_list = [10]\n for iteration in iteration_list:\n for i in range(iteration):\n start = time.time()\n # estimate human poses from a single image !\n image = common.read_imgfile(image_path, None, None)\n # print(\"image shape = %s\" % str(image.shape))\n if image is None:\n sys.exit(-1)\n t = time.time()\n humans = e.inference(image, resize_to_default=False, upsample_size=resize_out_ratio)\n elapsed = time.time() - t\n\n end = time.time()\n print(\"It takes %s sec to run\" % (str(end - start)))\n\n # Yitao-TLS-Begin\n if (model_name == \"cmu\"):\n export_path_base = \"pose_openpose\"\n else:\n export_path_base = \"pose_thinpose\"\n export_path = os.path.join(\n compat.as_bytes(export_path_base),\n compat.as_bytes(str(FLAGS.model_version)))\n print('Exporting trained model to %s' % str(export_path))\n builder = saved_model_builder.SavedModelBuilder(export_path)\n\n tensor_info_x1 = tf.saved_model.utils.build_tensor_info(e.tensor_image)\n tensor_info_x2 = tf.saved_model.utils.build_tensor_info(e.upsample_size)\n # tensor_info_y = tf.saved_model.utils.build_tensor_info(e.tensor_output)\n tensor_info_y1 = tf.saved_model.utils.build_tensor_info(e.tensor_peaks)\n tensor_info_y2 = tf.saved_model.utils.build_tensor_info(e.tensor_heatMat_up)\n tensor_info_y3 = tf.saved_model.utils.build_tensor_info(e.tensor_pafMat_up)\n\n prediction_signature = tf.saved_model.signature_def_utils.build_signature_def(\n inputs={'tensor_image': tensor_info_x1,\n 'upsample_size': tensor_info_x2},\n # outputs = {'tensor_output': tensor_info_y},\n outputs={'tensor_peaks': tensor_info_y1,\n 'tensor_heatMat_up': tensor_info_y2,\n 'tensor_pafMat_up': tensor_info_y3},\n method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)\n\n legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')\n builder.add_meta_graph_and_variables(\n e.persistent_sess, [tf.saved_model.tag_constants.SERVING],\n signature_def_map={\n 'predict_images':\n prediction_signature,\n },\n legacy_init_op=legacy_init_op)\n\n builder.save()\n\n print('Done exporting!')\n # Yitao-TLS-End\n\n print(image.shape)\n print(humans)\n" ]
[ [ "tensorflow.saved_model.signature_def_utils.build_signature_def", "tensorflow.app.flags.DEFINE_integer", "tensorflow.saved_model.utils.build_tensor_info", "tensorflow.python.saved_model.builder.SavedModelBuilder", "tensorflow.tables_initializer", "tensorflow.python.util.compat.as_bytes" ] ]
aringler-usgs/normal_mode_GSN
[ "e3e593466c0ed7a1741a3bd433a9ef2dc34a2acf" ]
[ "figure4.py" ]
[ "#!/usr/bin/env python\nimport glob\nimport matplotlib.pyplot as plt\nimport numpy as np\n\nimport matplotlib as mpl\n\nmpl.rc('font', family='serif')\nmpl.rc('font', serif='Times')\nmpl.rc('text', usetex=True)\nmpl.rc('font', size=18)\n\nmodes = ['0S2', '0S3', '0S4', '0S5']\nletters = ['(a)', '(b)', '(c)', '(d)']\nfig, ax = plt.subplots(2, 2, figsize=(16, 12))\nax = ax.flatten()\n\nfor midx, mode in enumerate(modes):\n stas, sign, eve, colors, symbols, sensors = [], [], [], [], [], []\n files = glob.glob('*' + mode + '*.csv')\n\n for cfile in files:\n f = open(cfile, 'r')\n jday = cfile.split('_')[2]\n print(jday)\n for line in f:\n line = line.split(',')\n stas.append(line[0])\n snr = 20 * np.log10(float(line[-2]))\n if snr > 20:\n sign.append(20)\n elif snr < -80:\n sign.append(-80)\n else:\n sign.append(snr)\n if 'STS-1' in line[-1]:\n c = 'C0'\n elif 'STS-6' in line[-1]:\n c = 'C1'\n elif 'KS' in line[-1]:\n c = 'C2'\n elif '360' in line[-1]:\n c = 'C3'\n else:\n c = 'C4'\n if '360' in line[-1]:\n sensors.append('T-360GSN')\n else:\n sensors.append(line[-1])\n colors.append(c)\n eve.append(jday)\n if jday == '91':\n symbols.append('s')\n else:\n symbols.append('o')\n f.close()\n\n sensorsuni = list(set(sensors))\n\n sign = np.array(sign)\n eve = np.array(eve)\n for idx, sen in enumerate(sensorsuni):\n gidx = [i for i, x in enumerate(sensors) if sen in x]\n sigval0, sigval1 = [], []\n for gidx2 in gidx:\n if eve[gidx2] == '91':\n sigval0.append(sign[gidx2])\n else:\n sigval1.append(sign[gidx2])\n if eve[gidx2] == '91':\n ax[midx].plot(sign[gidx2], idx + 0.2, symbols[gidx2],\n color=colors[gidx2], alpha=0.5)\n else:\n ax[midx].plot(sign[gidx2], idx - 0.2, symbols[gidx2],\n color=colors[gidx2], alpha=0.5)\n sigval0 = np.array(sigval0)\n sigval1 = np.array(sigval1)\n ax[midx].plot(np.mean(sigval0), idx + 0.2, 's', color=colors[gidx2],\n markersize=20, alpha=0.5, markeredgecolor='k')\n ax[midx].plot(np.mean(sigval1), idx - 0.2, 'o', color=colors[gidx2],\n markersize=20, alpha=0.5, markeredgecolor='k')\n ax[midx].set_title(letters[midx], loc='left')\n ax[midx].set_yticks(range(len(sensorsuni)))\n ax[midx].set_yticklabels(sensorsuni, fontsize=12)\n ax[midx].set_xlabel(\n 'Noise of $_' + mode[0] + mode[1] + '_' + mode[2] + '$ (dB)')\n\nplt.savefig('figure4again.pdf', format='PDF', dpi=400)\nplt.savefig('figure4again.png', format='PNG', dpi=400)\n" ]
[ [ "numpy.array", "matplotlib.pyplot.savefig", "numpy.mean", "matplotlib.pyplot.subplots", "matplotlib.rc" ] ]
universe-hcy/Elastic-Federated-Learning-Solution
[ "4e047fbbe6ae9809cd631499b7d3a3855dfe2208" ]
[ "efls-train/python/efl/example/paillier_mnist/leader_weight.py" ]
[ "# Copyright (C) 2016-2021 Alibaba Group Holding Limited\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\nimport os\nimport numpy as np\nimport tensorflow.compat.v1 as tf\nimport efl\n\ndef input_fn(model, mode):\n if mode == efl.MODE.TRAIN:\n dataio = efl.data.FederalDataIO(\"./leader_train\", 256, model.communicator, model.federal_role, 0, 1, data_mode='local')\n dataio.fixedlen_feature('sample_id', 1, dtype=tf.int64)\n dataio.fixedlen_feature('feature', 14*28, dtype=tf.float32)\n dataio.fixedlen_feature('label', 1, dtype=tf.float32)\n features = dataio.read()\n model.add_hooks([dataio.get_hook()])\n columns = {\n \"label\": [tf.feature_column.numeric_column('label', 1)],\n \"emb\": [tf.feature_column.numeric_column('feature', 14*28)]}\n return efl.FederalSample(features, columns, model.federal_role, model.communicator, sample_id_name='sample_id')\n\ndef model_fn(model, sample):\n inputs = sample['emb']\n if 'keypair' in model.keypairs:\n keypair = model.keypair('keypair')\n else:\n keypair = model.create_keypair('keypair', efl.privacy.Role.RECEIVER, n_bytes=128, group_size=10)\n fc1 = tf.layers.dense(inputs, 128,\n kernel_initializer=tf.truncated_normal_initializer(\n stddev=0.001, dtype=tf.float32))\n #fc1 = model.paillier_recver_weight(fc1, keypair, 'paillier_weight', 0.001, 128)\n fc1 = tf.layers.dense(\n fc1, 10, kernel_initializer=tf.truncated_normal_initializer(\n stddev=0.001, dtype=tf.float32))\n y = model.paillier_recver_weight(fc1, keypair, 'paillier_weight', 0.001, 10)\n pred = tf.argmax(y, axis=-1)\n _, accuracy = tf.metrics.accuracy(sample['label'], pred)\n model.add_metric('accuracy', accuracy)\n label = tf.cast(sample['label'], tf.int32)\n label = tf.reshape(label, [-1])\n label = tf.one_hot(label, 10)\n loss = tf.losses.softmax_cross_entropy(label, y)\n return loss\n\nCTR = efl.FederalModel()\nCTR.input_fn(input_fn)\nCTR.loss_fn(model_fn)\nCTR.optimizer_fn(efl.optimizer_fn.optimizer_setter(tf.train.GradientDescentOptimizer(0.001)))\nCTR.compile()\nCTR.fit(efl.procedure_fn.train(), \n log_step=1, \n project_name=\"train\")\n" ]
[ [ "tensorflow.compat.v1.cast", "tensorflow.compat.v1.reshape", "tensorflow.compat.v1.one_hot", "tensorflow.compat.v1.losses.softmax_cross_entropy", "tensorflow.compat.v1.train.GradientDescentOptimizer", "tensorflow.compat.v1.metrics.accuracy", "tensorflow.compat.v1.feature_column.numeric_column", "tensorflow.compat.v1.argmax", "tensorflow.compat.v1.truncated_normal_initializer" ] ]
yongdd/deep-langevin-fts
[ "70eebb2172b2fbc971ee9abaa441b2cb9675d262" ]
[ "run_simulation.py" ]
[ "import numpy as np\r\nimport yaml\r\nfrom scipy.io import *\r\nimport scipy.ndimage\r\nfrom langevinfts import *\r\nfrom saddle_net import *\r\nfrom deep_langevin_fts import *\r\n\r\n#os.environ[\"CUDA_VISIBLE_DEVICES\"]= \"2\"\r\n\r\n# -------------- read input parameters and data --------------\r\nwith open('input_parameters.yaml') as f:\r\n input_params = yaml.load(f, Loader=yaml.FullLoader)\r\ninput_data = loadmat(\"LastTrainingStep.mat\", squeeze_me=True)\r\n\r\n# Interpolate input data on input paramter grid\r\nw_plus = scipy.ndimage.zoom(np.reshape(input_data[\"w_plus\"], input_data[\"nx\"]), input_params[\"nx\"]/input_data[\"nx\"])\r\nw_minus = scipy.ndimage.zoom(np.reshape(input_data[\"w_minus\"], input_data[\"nx\"]), input_params[\"nx\"]/input_data[\"nx\"])\r\n\r\nprint(w_plus.shape)\r\nprint(w_minus.shape)\r\n\r\n# -------------- deep learning --------------\r\nuse_deep_learning = True\r\nmodel_file = \"pretrained_models/gyroid_atr_par_32.pth\"\r\n\r\ntorch.set_num_threads(1)\r\nif (use_deep_learning):\r\n net = SaddleNet(dim=3, features=32)\r\n net.load_state_dict(torch.load(model_file), strict=True)\r\nelse:\r\n net = None\r\n \r\n# -------------- langevin fts --------------\r\ndeepfts = DeepLangevinFTS(input_params)\r\n\r\n# np.random.seed(5489)\r\n(total_saddle_iter, saddle_iter_per, time_duration_per,\r\ntime_pseudo_ratio, time_neural_net_ratio, total_net_failed) \\\r\n = deepfts.run(\r\n w_plus = np.reshape(w_plus, np.prod(input_params[\"nx\"])),\r\n w_minus = np.reshape(w_minus, np.prod(input_params[\"nx\"])),\r\n saddle_max_iter = input_params['saddle']['max_iter'],\r\n saddle_tolerance = float(input_params['saddle']['tolerance']),\r\n dt = input_params['langevin']['dt'],\r\n nbar = input_params['langevin']['nbar'],\r\n max_step = input_params['simulation_data']['max_step'],\r\n path_dir = input_params['simulation_data']['dir'],\r\n recording_period = input_params['simulation_data']['recording_period'],\r\n sf_computing_period = input_params['simulation_data']['sf_computing_period'],\r\n sf_recording_period = input_params['simulation_data']['sf_recording_period'],\r\n net=net)\r\n \r\n# estimate execution time\r\nprint( \"Total iterations for saddle points: %d, iter per step: %f\" %\r\n (total_saddle_iter, saddle_iter_per))\r\nprint( \"Total time: %f, time per step: %f\" %\r\n (time_duration_per*total_saddle_iter, time_duration_per) )\r\nprint( \"Pseudo time ratio: %f, deep learning time ratio: %f\" %\r\n (time_pseudo_ratio, time_neural_net_ratio) )\r\nprint( \"The number of times that the neural-net could not reduce the incompressible error and switched to Anderson mixing: %d times\" % (total_net_failed) )\r\n" ]
[ [ "numpy.prod", "numpy.reshape" ] ]
SOLINSIGHT/solinsight
[ "b0398c48e33a1f43a2ec4528477cc07e0b692bd6" ]
[ "tool/picture.py" ]
[ "'''\nDescription: \nLastEditTime: 2021-10-20 22:50:54\n'''\n# Output columnar stacking diagram of various vulnerabilities\n\nimport matplotlib.pyplot as plt\nimport matplotlib.ticker as mtick \nfrom math import *\nimport numpy as np\nimport pandas as pd\nimport os\n\n\ncsv = pd.read_csv(\"\")# read a csv file\ntime = np.array(csv.parsetime)\nnum = np.array(csv.num)\nnum = [log(i) for i in num]\n\nplt.xlabel('Time (s)')\nplt.ylabel('Online Decompiler (Log)')\nplt.bar(time[:-2], num[:-2])\nplt.savefig(\"\", dpi=300)\n\nplt.show() \n\n# ===========================================\ncsv = pd.read_csv(\"\") # read a csv file\ntime = np.array(csv.parsetime)\nnum = np.array(csv.num)\nnum = [log(i) for i in num]\n\nplt.xlabel('Time (s)')\nplt.ylabel('Offline Decompiler (Log)')\nplt.bar(time, num)\nplt.savefig('', dpi=300) # save a picture file in local dir\n\nplt.show() \n\n\n\n\n\n# from matplotlib.colors import LogNorm\n# import matplotlib.pyplot as plt\n# import numpy as np\n\n\n# plt.hist2d(time, num)#, bins=40, norm=LogNorm())\n# plt.colorbar()\n# plt.show()\n" ]
[ [ "numpy.array", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.savefig", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.show", "pandas.read_csv", "matplotlib.pyplot.bar" ] ]
mbkumar/pyoculus
[ "8c925b139e27fd016b37aa4fe4653276af67e0a5" ]
[ "pyoculus/solvers/fixed_point.py" ]
[ "## @file fixed_point.py\n# @brief class for finding fixed points\n# @author Zhisong Qu ([email protected])\n#\n\nfrom .base_solver import BaseSolver\nimport numpy as np\n\n## Class that used to setup the fixed point finder.\nclass FixedPoint(BaseSolver):\n def __init__(\n self, problem, params=dict(), integrator=None, integrator_params=dict()\n ):\n \"\"\"! Set up the class of the fixed point finder\n @param problem must inherit pyoculus.problems.BaseProblem, the problem to solve\n @param params dict, the parameters for the solver\n @param integrator the integrator to use, must inherit \\pyoculus.integrators.BaseIntegrator, if set to None by default using RKIntegrator\n @param integrator_params dict, the parmaters passed to the integrator\n\n <code> params['niter']=100 </code> -- the maximum number of Newton iterations\n\n <code> params['theta']=None </code>-- if we look for fixed point on some symmetry line\n =None : theta is also a free variable to look for\n =somenumber : only look for theta with this number\n\n <code> params['zeta']=0.0 </code>-- the toroidal plane we are after\n\n <code> params['nrestart']=1 </code>-- if search failed, the number of time to restart (randomly within the domain)\n \"\"\"\n\n if \"niter\" not in params.keys():\n params[\"niter\"] = 100\n\n if \"theta\" not in params.keys():\n params[\"theta\"] = None\n\n if \"zeta\" not in params.keys():\n params[\"zeta\"] = 0.0\n\n if \"nrestart\" not in params.keys():\n params[\"nrestart\"] = 1\n\n integrator_params[\"ode\"] = problem.f_tangent\n\n super().__init__(\n problem=problem,\n params=params,\n integrator=integrator,\n integrator_params=integrator_params,\n )\n\n self.Nfp = problem.Nfp\n self.niter = params[\"niter\"]\n self.nrestart = params[\"nrestart\"]\n if params[\"theta\"] is None:\n self.is_theta_fixed = False\n else:\n self.is_theta_fixed = True\n\n def compute(self, guess, pp, qq, sbegin=-1.0, send=1.0, tol=None):\n \"\"\"! 
Looks for the fixed point with rotation number pp/qq\n @param guess the initial guess, `[s, theta]`, if `params['theta'] == None`, `[s]`, if `params['theta'] ==` somevalue\n @param pp integer, the numerator of the rotation number\n @param qq integer, the denominator of the rotation number\n @param sbegin=-1.0 the allowed minimum s\n @param send=1.0 the allowed maximum s\n @param tol=self._integrator_params['rtol']*qq -- the tolerance of the fixed point\n\n @returns rdata a class that contains the results that contains\n `rdata.x,rdata.y,rdata,z` -- the fixed points in xyz coordinates\n\n `rdata.s,rdata,theta,rdata,zeta` -- the fixed points in s,theta,zeta coordinates\n\n `rdata.jacobian` -- the Jacobian of the fixed point constructed by following the tangent map\n\n `rdata.GreenesResidue` -- the Greene's Residue of the fixed point\n\n `rdata.MeanResidue` -- the 'Average Residue' f as defined by Greene\n \"\"\"\n\n if not isinstance(pp, int) or not isinstance(qq, int):\n raise ValueError(\"pp and qq should be integers\")\n\n if tol is None:\n tol = self._integrator_params[\"rtol\"] * qq\n\n if pp * qq >= 0:\n pp = int(np.abs(pp))\n qq = int(np.abs(qq))\n else:\n pp = -int(np.abs(pp))\n qq = int(np.abs(qq))\n\n self.pp = pp\n self.qq = qq\n self.dzeta = 2 * np.pi / self.Nfp\n\n # arrays that save the data\n self.s = np.zeros([qq + 1], dtype=np.float64)\n self.theta = np.zeros([qq + 1], dtype=np.float64)\n self.zeta = np.zeros([qq + 1], dtype=np.float64)\n self.x = np.zeros([qq + 1], dtype=np.float64)\n self.y = np.zeros([qq + 1], dtype=np.float64)\n self.z = np.zeros([qq + 1], dtype=np.float64)\n\n self.history = []\n\n # set up the guess\n if isinstance(guess, float):\n s_guess = guess\n else:\n guess = np.array(guess, dtype=np.float64)\n s_guess = guess[0]\n\n if self._params[\"theta\"] is None:\n theta_guess = guess[1]\n else:\n theta_guess = self._params[\"theta\"]\n\n # run the Newton's method\n for ii in range(self._params[\"nrestart\"] + 1):\n try: # run the solver, if failed, try a different random initial condition\n if self.is_theta_fixed:\n result = self._newton_method_1(\n pp,\n qq,\n s_guess,\n sbegin,\n send,\n theta_guess,\n self._params[\"zeta\"],\n self.dzeta,\n self.niter,\n tol,\n )\n else:\n result = self._newton_method_2(\n pp,\n qq,\n s_guess,\n sbegin,\n send,\n theta_guess,\n self._params[\"zeta\"],\n self.dzeta,\n self.niter,\n tol,\n )\n except:\n result = None\n\n if result is not None: # if it is successful:\n break\n else: # not successful, change to a random initial condition\n print(\"Search failed: starting from a random initial guesss!\")\n random_guess = np.random.rand(2)\n s_guess = sbegin + (send - sbegin) * random_guess[0]\n if self._params[\"theta\"] is None:\n theta_guess = random_guess[1] * 2 * np.pi\n\n # now we go and get all the fixed points by iterating the map\n if result is not None:\n t = self.zeta[0]\n dt = 2 * np.pi / self.Nfp\n\n self.s[0] = result[0]\n self.theta[0] = result[1]\n self.zeta[0] = self._params[\"zeta\"]\n\n ic = np.array([result[0], result[1], 1.0, 0.0, 0.0, 1.0], dtype=np.float64)\n self._integrator.set_initial_value(t, ic)\n\n # integrate to get a series of fixed points\n for jj in range(1, qq + 1):\n\n # run the integrator\n st = self._integrator.integrate(t + dt)\n\n # extract the result to s theta zeta\n self.s[jj] = st[0]\n self.theta[jj] = st[1]\n self.zeta[jj] = t + dt\n\n # advance in time\n t = t + dt\n\n # convert coordinates\n for jj in range(0, qq + 1):\n stz = np.array(\n [self.s[jj], self.theta[jj], 
self.zeta[jj]], dtype=np.float64\n )\n xyz = self._problem.convert_coords(stz)\n self.x[jj] = xyz[0]\n self.y[jj] = xyz[1]\n self.z[jj] = xyz[2]\n\n rdata = FixedPoint.OutputData()\n rdata.x = self.x.copy()\n rdata.y = self.y.copy()\n rdata.z = self.z.copy()\n rdata.s = self.s.copy()\n rdata.theta = self.theta.copy()\n rdata.zeta = self.zeta.copy()\n\n # the jacobian\n rdata.jacobian = np.array(\n [[st[2], st[4]], [st[3], st[5]]], dtype=np.float64\n )\n\n # Greene's Residue\n rdata.GreenesResidue = 0.25 * (2.0 - np.trace(rdata.jacobian))\n rdata.MeanResidue = np.power(\n np.abs(rdata.GreenesResidue) / 0.25, 1 / float(qq)\n )\n self.GreenesResidue = rdata.GreenesResidue\n self.MeanResidue = rdata.MeanResidue\n\n # set the successful flag\n self.successful = True\n\n else:\n rdata = None\n print(\"Fixed point search unsuccessful for pp/qq=\", pp, \"/\", qq)\n\n return rdata\n\n def plot(\n self, plottype=None, xlabel=None, ylabel=None, xlim=None, ylim=None, **kwargs\n ):\n \"\"\"! Generates the plot for fixed points\n @param plottype which variables to plot: 'RZ' or 'yx', by default using \"poincare_plot_type\" in problem\n @param xlabel,ylabel what to put for the xlabel and ylabel, by default using \"poincare_plot_xlabel\" in problem\n @param xlim, ylim the range of plotting, by default plotting the range of all data\n @param **kwargs passed to the plotting routine \"plot\"\n \"\"\"\n import matplotlib.pyplot as plt\n\n if not self.successful:\n raise Exception(\"A successful call of compute() is needed\")\n\n # default setting\n if plottype is None:\n plottype = self._problem.poincare_plot_type\n if xlabel is None:\n xlabel = self._problem.poincare_plot_xlabel\n if ylabel is None:\n ylabel = self._problem.poincare_plot_ylabel\n\n if plottype == \"RZ\":\n xdata = self.x\n ydata = self.z\n elif plottype == \"yx\":\n xdata = self.y\n ydata = self.x\n elif plottype == \"st\":\n xdata = np.mod(self.theta, 2 * np.pi)\n ydata = self.s\n else:\n raise ValueError(\"Choose the correct type for plottype\")\n\n if plt.get_fignums():\n fig = plt.gcf()\n ax = plt.gca()\n newfig = False\n else:\n fig, ax = plt.subplots()\n newfig = True\n\n # set default plotting parameters\n # use x\n if kwargs.get(\"marker\") is None:\n kwargs.update({\"marker\": \"x\"})\n # use gray color\n if kwargs.get(\"c\") is None:\n kwargs.update({\"c\": \"black\"})\n\n xs = ax.plot(xdata, ydata, linestyle=\"None\", **kwargs)\n\n if not newfig:\n if plottype == \"RZ\":\n plt.axis(\"equal\")\n if plottype == \"yx\":\n pass\n\n plt.xlabel(xlabel, fontsize=20)\n plt.ylabel(ylabel, fontsize=20)\n plt.xticks(fontsize=16)\n plt.yticks(fontsize=16)\n\n if xlim is not None:\n plt.xlim(xlim)\n if ylim is not None:\n plt.ylim(ylim)\n\n def _newton_method_1(\n self, pp, qq, s_guess, sbegin, send, theta, zeta, dzeta, niter, tol\n ):\n \"\"\"driver to run Newton's method for one variable s\n pp,qq -- integers, the numerator and denominator of the rotation number\n s_guess -- the guess of s\n sbegin -- the allowed minimum s\n send -- the allowed maximum s\n theta -- the theta value (fixed)\n zeta -- the toroidal plain to investigate\n dzeta -- period in zeta\n niter -- the maximum number of iterations\n tol -- the tolerance of finding a fixed point\n \"\"\"\n\n s = s_guess\n\n # set up the initial condition\n ic = np.array([s, theta, 1.0, 0.0, 0.0, 1.0], dtype=np.float64)\n self.history.append(ic[0:1].copy())\n\n t0 = zeta\n dt = dzeta\n\n succeeded = False\n\n for ii in range(niter):\n t = t0\n self._integrator.set_initial_value(t0, ic)\n\n 
for jj in range(qq):\n output = self._integrator.integrate(t + dt)\n t = t + dt\n\n dtheta = output[1] - theta - dzeta * pp\n jacobian = output[3]\n\n # if the resolution is good enough\n if abs(dtheta) < tol:\n succeeded = True\n break\n s_new = s - dtheta / jacobian\n s = s_new\n\n if s > send or s < sbegin: # search failed, return None\n return None\n\n ic = np.array([s, theta, 1.0, 0.0, 0.0, 1.0], dtype=np.float64)\n self.history.append(ic[0:1].copy())\n\n if succeeded:\n return np.array([s, theta, zeta], dtype=np.float64)\n else:\n return None\n\n def _newton_method_2(\n self, pp, qq, s_guess, sbegin, send, theta_guess, zeta, dzeta, niter, tol\n ):\n \"\"\"driver to run Newton's method for two variable (s,theta)\n pp,qq -- integers, the numerator and denominator of the rotation number\n s_guess -- the guess of s\n sbegin -- the allowed minimum s\n send -- the allowed maximum s\n theta_guess -- the guess of theta\n zeta -- the toroidal plain to investigate\n dzeta -- period in zeta\n niter -- the maximum number of iterations\n tol -- the tolerance of finding a fixed point\n \"\"\"\n\n self.successful = False\n\n s = s_guess\n theta = theta_guess\n\n # set up the initial condition\n ic = np.array([s, theta, 1.0, 0.0, 0.0, 1.0], dtype=np.float64)\n self.history.append(ic[0:1].copy())\n\n t0 = zeta\n dt = dzeta\n\n succeeded = False\n\n st = np.array([s, theta], dtype=np.float64)\n\n for ii in range(niter):\n t = t0\n self._integrator.set_initial_value(t0, ic)\n for jj in range(qq):\n output = self._integrator.integrate(t + dt)\n t = t + dt\n\n dtheta = output[1] - theta - dzeta * pp\n ds = output[0] - s\n dst = np.array([ds, dtheta], dtype=np.float64)\n jacobian = np.array(\n [[output[2], output[4]], [output[3], output[5]]], dtype=np.float64\n )\n\n # if the resolution is good enough\n if np.sqrt(dtheta ** 2 + ds ** 2) < tol:\n succeeded = True\n break\n\n # Newton's step\n st_new = st - np.matmul(np.linalg.inv(jacobian - np.eye(2)), dst)\n s = st_new[0]\n theta = st_new[1]\n st = st_new\n\n if s > send or s < sbegin: # search failed, return None\n return None\n\n ic = np.array([s, theta, 1.0, 0.0, 0.0, 1.0], dtype=np.float64)\n self.history.append(ic[0:1].copy())\n\n if succeeded:\n self.successful = True\n return np.array([s, theta, zeta], dtype=np.float64)\n else:\n return None" ]
[ [ "numpy.random.rand", "matplotlib.pyplot.xlim", "matplotlib.pyplot.gcf", "matplotlib.pyplot.xticks", "matplotlib.pyplot.subplots", "numpy.eye", "numpy.sqrt", "matplotlib.pyplot.gca", "numpy.mod", "matplotlib.pyplot.axis", "numpy.array", "numpy.zeros", "matplotlib.pyplot.yticks", "numpy.trace", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.ylim", "matplotlib.pyplot.get_fignums", "matplotlib.pyplot.ylabel", "numpy.abs" ] ]
unluckydan/deep_metric_learning
[ "5c2bb9e8a921ea6d492b2ab264d163bbd4764a4c" ]
[ "lib/datasets/cub200_2011_converter.py" ]
[ "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Dec 20 20:11:40 2016\n\n@author: sakurai\n\"\"\"\n\nimport os\nimport tarfile\nimport subprocess\nimport numpy as np\nfrom scipy.io import loadmat\nimport matplotlib.pyplot as plt\nimport h5py\nimport fuel\nfrom fuel.datasets.hdf5 import H5PYDataset\nfrom tqdm import tqdm\nimport cv2\n\n\ndef extract_class_label(filename):\n \"\"\"\n arg:\n filename: string, e.g.\n 'images/001.Black_footed_Albatross/Black_footed_Albatross_0001_2950163169.jpg'\n\n return:\n A class label as integer, e.g. 1\n \"\"\"\n _, class_dir, _ = filename.split(\"/\")\n return int(class_dir.split(\".\")[0])\n\n\ndef preprocess(hwc_bgr_image, size):\n hwc_rgb_image = cv2.cvtColor(hwc_bgr_image, cv2.COLOR_BGR2RGB)\n resized = cv2.resize(hwc_rgb_image, (size))\n chw_image = np.transpose(resized, axes=(2, 0, 1))\n return chw_image\n\n\nif __name__ == '__main__':\n dataset_name = \"cub200_2011\"\n archive_basename = \"CUB_200_2011\"\n\n fuel_root_path = os.path.normpath(fuel.config.config[\"data_path\"][\"yaml\"])\n fuel_data_path = os.path.join(fuel_root_path, dataset_name)\n extracted_dir_path = os.path.join(fuel_data_path, archive_basename)\n archive_filepath = extracted_dir_path + \".tgz\"\n images_dir_path = os.path.join(extracted_dir_path, \"images\")\n label_filepath = os.path.join(extracted_dir_path, \"image_class_labels.txt\")\n image_list_filepath = os.path.join(extracted_dir_path, \"images.txt\")\n\n # Extract CUB_200_2011.tgz if CUB_200_2011 directory does not exist\n if not os.path.exists(os.path.join(fuel_data_path, archive_basename)):\n subprocess.call([\"tar\", \"zxvf\", archive_filepath.replace(\"\\\\\", \"/\"),\n \"-C\", fuel_data_path.replace(\"\\\\\", \"/\"),\n \"--force-local\"])\n\n id_name_pairs = np.loadtxt(image_list_filepath, np.str)\n assert np.array_equal(\n [int(i) for i in id_name_pairs[:, 0].tolist()], range(1, 11789))\n id_label_pairs = np.loadtxt(label_filepath, np.str)\n assert np.array_equal(\n [int(i) for i in id_label_pairs[:, 0].tolist()], range(1, 11789))\n jpg_filenames = id_name_pairs[:, 1].tolist()\n class_labels = [int(i) for i in id_label_pairs[:, 1].tolist()]\n num_examples = len(jpg_filenames)\n num_clases = 200\n assert np.array_equal(np.unique(class_labels), range(1, num_clases + 1))\n\n # open hdf5 file\n hdf5_filename = dataset_name + \".hdf5\"\n hdf5_filepath = os.path.join(fuel_data_path, hdf5_filename)\n hdf5 = h5py.File(hdf5_filepath, mode=\"w\")\n\n # store images\n image_size = (256, 256)\n array_shape = (num_examples, 3) + image_size\n ds_images = hdf5.create_dataset(\"images\", array_shape, dtype=np.uint8)\n ds_images.dims[0].label = \"batch\"\n ds_images.dims[1].label = \"channel\"\n ds_images.dims[2].label = \"height\"\n ds_images.dims[3].label = \"width\"\n\n # write images to the disk\n for i, filename in tqdm(enumerate(jpg_filenames), total=num_examples,\n desc=hdf5_filepath):\n raw_image = cv2.imread(os.path.join(images_dir_path, filename),\n cv2.IMREAD_COLOR) # BGR image\n image = preprocess(raw_image, image_size)\n ds_images[i] = image\n\n # store the targets (class labels)\n targets = np.array(class_labels, np.int32).reshape(num_examples, 1)\n ds_targets = hdf5.create_dataset(\"targets\", data=targets)\n ds_targets.dims[0].label = \"batch\"\n ds_targets.dims[1].label = \"class_labels\"\n\n # specify the splits (labels 1~100 for train, 101~200 for test)\n test_head = class_labels.index(101)\n split_train, split_test = (0, test_head), (test_head, num_examples)\n split_dict = dict(train=dict(images=split_train, 
targets=split_train),\n test=dict(images=split_test, targets=split_test))\n hdf5.attrs[\"split\"] = H5PYDataset.create_split_array(split_dict)\n\n hdf5.flush()\n hdf5.close()\n" ]
[ [ "numpy.array", "numpy.loadtxt", "numpy.transpose", "numpy.unique" ] ]
njanakiev/onnx-tensorflow
[ "5a8f36e06187d8482830adeede8e5a3c9543dbd5" ]
[ "example/train_onnx_model.py" ]
[ "import os\nimport logging\nimport onnx\nimport numpy as np\n\nimport tensorflow as tf\nfrom tensorflow.keras import datasets, layers, models, regularizers\nimport onnx_tf\ntf_compat = tf.compat.v1\n\nbatch_size = 32\nepochs = 2\n\nsaved_model_path = './saved_model/'\nonnx_model_file = './onnx_model/model.onnx'\ntrained_onnx_model = './onnx_model/trained.onnx'\nonnx_model_path = os.path.dirname(onnx_model_file)\nuse_dataset = 'mnist' # mnist or cifar10\nvgg_model = False\n\n\ndef get_dataset():\n if use_dataset == 'mnist':\n dataset = datasets.mnist\n else:\n dataset = datasets.cifar10\n\n (x_train, y_train), (x_test, y_test) = dataset.load_data()\n x_train, x_test = x_train / 255.0, x_test / 255.0\n if use_dataset == 'mnist':\n x_train = x_train[..., tf.newaxis]\n x_test = x_test[..., tf.newaxis]\n\n train_ds = tf.data.Dataset.from_tensor_slices(\n (x_train, y_train)).shuffle(10000).batch(batch_size, drop_remainder=True)\n test_ds = tf.data.Dataset.from_tensor_slices(\n (x_test, y_test)).batch(batch_size, drop_remainder=True)\n return train_ds, test_ds\n\n\ndef save_trained_onnx(tensor_dict, onnx_model, sess):\n print('Update onnx model....')\n # Collect retrained parameters.\n retrained_params = {}\n for name, tensor in tensor_dict.items():\n if isinstance(tensor, tf.Variable):\n retrained_params[name] = sess.run(tensor)\n\n # Update onnx model using new parameters:\n for tensor in onnx_model.graph.initializer:\n if tensor.name in retrained_params:\n print(\"Updating {}.\".format(tensor.name))\n assert tensor.HasField(\"raw_data\")\n tensor.raw_data = retrained_params[tensor.name].tobytes()\n\n onnx.save(onnx_model, trained_onnx_model)\n print('Save trained onnx model {}'.format(trained_onnx_model))\n\n\nclass VGG16(models.Model):\n\n def __init__(self, input_shape):\n \"\"\"\n :param input_shape: [32, 32, 3]\n \"\"\"\n super(VGG16, self).__init__()\n\n weight_decay = 0.000\n self.num_classes = 10\n\n model = models.Sequential()\n\n model.add(\n layers.Conv2D(64, (3, 3),\n padding='same',\n input_shape=input_shape,\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.3))\n\n model.add(\n layers.Conv2D(64, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n\n model.add(layers.MaxPooling2D(pool_size=(2, 2)))\n\n model.add(\n layers.Conv2D(128, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.4))\n\n model.add(\n layers.Conv2D(128, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n\n model.add(layers.MaxPooling2D(pool_size=(2, 2)))\n\n model.add(\n layers.Conv2D(256, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.4))\n\n model.add(\n layers.Conv2D(256, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.4))\n\n model.add(\n layers.Conv2D(256, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n 
model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n\n model.add(layers.MaxPooling2D(pool_size=(2, 2)))\n\n model.add(\n layers.Conv2D(512, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.4))\n\n model.add(\n layers.Conv2D(512, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.4))\n\n model.add(\n layers.Conv2D(512, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n\n model.add(layers.MaxPooling2D(pool_size=(2, 2)))\n\n model.add(\n layers.Conv2D(512, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.4))\n\n model.add(\n layers.Conv2D(512, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dropout(0.4))\n\n model.add(\n layers.Conv2D(512, (3, 3),\n padding='same',\n kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n\n model.add(layers.MaxPooling2D(pool_size=(2, 2)))\n model.add(layers.Dropout(0.5))\n\n model.add(layers.Flatten())\n model.add(\n layers.Dense(512, kernel_regularizer=regularizers.l2(weight_decay)))\n model.add(layers.Activation('relu'))\n model.add(layers.BatchNormalization())\n\n model.add(layers.Dropout(0.5))\n model.add(layers.Dense(self.num_classes))\n # model.add(layers.Activation('softmax'))\n\n self.model = model\n\n def call(self, x):\n\n x = self.model(x)\n\n return x\n\n\ndef train_tf_model():\n if use_dataset == 'mnist':\n input_shape = (28, 28, 1)\n ds = datasets.mnist\n else:\n ds = datasets.cifar10\n input_shape = (32, 32, 3)\n\n if vgg_model:\n model = VGG16([32, 32, 3])\n model.build(input_shape=(None, 32, 32, 3))\n else:\n model = models.Sequential()\n model.add(\n layers.Conv2D(32, (3, 3), activation='relu', input_shape=input_shape))\n model.add(layers.MaxPooling2D((2, 2)))\n model.add(layers.Conv2D(64, (3, 3), activation='relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.MaxPooling2D((2, 2)))\n model.add(layers.Conv2D(64, (3, 3), activation='relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Flatten())\n model.add(layers.Dense(64, activation='relu'))\n model.add(layers.BatchNormalization())\n model.add(layers.Dense(10))\n model.summary()\n\n (train_images, train_labels), (test_images, test_labels) = ds.load_data()\n train_images, test_images = train_images / 255.0, test_images / 255.0\n if use_dataset == 'mnist':\n train_images = train_images[..., tf.newaxis]\n test_images = test_images[..., tf.newaxis]\n\n model.compile(\n optimizer='adam',\n loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n metrics=['accuracy'])\n\n model.fit(train_images,\n train_labels,\n epochs=2,\n validation_data=(test_images, test_labels))\n model.evaluate(test_images, test_labels, verbose=2)\n if not os.path.exists(saved_model_path):\n os.mkdir(saved_model_path)\n model.save(saved_model_path)\n print(\"tf saved model: {}\".format(saved_model_path))\n\n\ndef convert_tf2onnx():\n 
if not os.path.exists(onnx_model_path):\n os.mkdir(onnx_model_path)\n os.system('python -m tf2onnx.convert --saved-model {} --output {}'.format(\n saved_model_path, onnx_model_file))\n print('onnx model: {}'.format(onnx_model_file))\n\n\ndef train_onnx_model():\n onnx_model = onnx.load(onnx_model_file)\n tf_rep = onnx_tf.backend.prepare(onnx_model,\n training_mode=True,\n logging_level=logging.ERROR)\n training_flag_placeholder = tf_rep.tensor_dict[\n onnx_tf.backend.training_flag_name]\n input_name = onnx_model.graph.input[0].name\n output_name = onnx_model.graph.output[0].name\n\n with tf_rep.graph.as_default():\n with tf_compat.Session() as sess:\n y_truth = tf_compat.placeholder(tf.int64, [None], name='y-input')\n tf_rep.tensor_dict[\"y_truth\"] = y_truth\n loss_op = tf.reduce_mean(\n tf_compat.losses.sparse_softmax_cross_entropy(\n labels=tf_rep.tensor_dict['y_truth'],\n logits=tf_rep.tensor_dict[output_name]))\n opt_op = tf_compat.train.AdamOptimizer().minimize(loss_op)\n eval_op = tf.reduce_mean(input_tensor=tf.cast(\n tf.equal(tf.argmax(input=tf_rep.tensor_dict[output_name], axis=1),\n tf_rep.tensor_dict['y_truth']), tf.float32))\n\n train_data, test_data = get_dataset()\n sess.run(tf_compat.global_variables_initializer())\n print(\"==> Train the model..\")\n\n for epoch in range(1, epochs + 1):\n step = 1\n next_batch = tf_compat.data.make_one_shot_iterator(\n train_data).get_next()\n while True:\n try:\n next_batch_value = sess.run(next_batch)\n feed_dict = {\n #tf_rep.tensor_dict[input_name]: next_batch_value[0].transpose((0, 3, 1, 2)),#for pytorch model\n tf_rep.tensor_dict[input_name]:\n next_batch_value[0],\n tf_rep.tensor_dict['y_truth']:\n next_batch_value[1].flatten()\n }\n feed_dict[training_flag_placeholder] = True\n loss, accuracy, _ = sess.run([loss_op, eval_op, opt_op],\n feed_dict=feed_dict)\n if (step % 100) == 0:\n print('Epoch {}, train step {}, loss:{}, accuracy:{}'.format(\n epoch, step, loss, accuracy))\n step += 1\n except tf.errors.OutOfRangeError:\n step = 1\n next_batch = tf_compat.data.make_one_shot_iterator(\n test_data).get_next()\n while True:\n try:\n next_batch_value = sess.run(next_batch)\n feed_dict = {\n #tf_rep.tensor_dict[input_name]: next_batch_value[0].transpose((0, 3, 1, 2)),#for pytorch model\n tf_rep.tensor_dict[input_name]:\n next_batch_value[0],\n tf_rep.tensor_dict['y_truth']:\n next_batch_value[1].flatten()\n }\n feed_dict[training_flag_placeholder] = False\n loss, accuracy = sess.run([loss_op, eval_op],\n feed_dict=feed_dict)\n if (step % 100) == 0:\n print('Epoch {}, test* step {}, loss:{}, accuracy:{}'.format(\n epoch, step, loss, accuracy))\n step += 1\n except tf.errors.OutOfRangeError:\n break\n break\n save_trained_onnx(tf_rep.tensor_dict, onnx_model, sess)\n\n\ndef run_onnx_model(onnx_file):\n print('Run onnx model....')\n onnx_model = onnx.load(onnx_file)\n tf_rep = onnx_tf.backend.prepare(onnx_model, logging_level=logging.ERROR)\n input_name = tf_rep.inputs[0]\n _, test_data = get_dataset()\n\n labels = []\n preds = []\n for img, label in test_data:\n input_value = img.numpy().astype('float32')\n gt = label.numpy()\n output = tf_rep.run({input_name: input_value})\n pred = np.argmax(output[0], axis=1).tolist()\n labels += gt.flatten().tolist()\n preds += pred\n\n correct_prediction = np.equal(preds, labels)\n acc = np.mean(correct_prediction)\n print('Accuracy: {}'.format(acc))\n\n\nif __name__ == \"__main__\":\n train_tf_model()\n convert_tf2onnx()\n run_onnx_model(onnx_model_file)\n train_onnx_model()\n 
run_onnx_model(trained_onnx_model)\n" ]
[ [ "numpy.equal", "tensorflow.data.Dataset.from_tensor_slices", "tensorflow.keras.layers.Flatten", "tensorflow.argmax", "tensorflow.keras.layers.Activation", "numpy.mean", "tensorflow.keras.layers.Dense", "tensorflow.keras.layers.Dropout", "tensorflow.keras.layers.Conv2D", "tensorflow.keras.layers.MaxPooling2D", "tensorflow.keras.losses.SparseCategoricalCrossentropy", "numpy.argmax", "tensorflow.keras.models.Sequential", "tensorflow.keras.regularizers.l2", "tensorflow.keras.layers.BatchNormalization" ] ]
comet-syt/BAPS
[ "96a347d759863510a81955122158f9fa6e42e005" ]
[ "deep_learning/code/VectorWithPytorch.py" ]
[ "import torch\n\nt = torch.FloatTensor([0., 1., 2., 3., 4., 5., 6.])\nprint(t)\n\nprint(f\"rank = {t.dim()}\") # -> 1\nprint(f\"shape = {t.shape}\") # -> print(torch.Size([7]))\nprint(f\"shape = {t.size()}\") # -> print(torch.Size([7]))\n\nprint(\"==============================\")\n\nprint(t[0], t[1], t[-1]) # -> tensor(0.) tensor(1.) tensor(6.)\nprint(t[2:5], t[4: -1]) # -> tensor([2., 3., 4.]) tensor([4., 5.])" ]
[ [ "torch.FloatTensor" ] ]
hawkingbeck/SARSCoV2HeronPipeline
[ "38b00daf68ee6c821b900684c77c728787cee42b" ]
[ "heronPipeline/src/images/mergeSampleExportFiles/app.py" ]
[ "import os\nimport json\nimport gzip\nimport shutil\nimport pandas as pd\nfrom sys import exit, stderr\nfrom datetime import datetime\nimport boto3\nfrom botocore.exceptions import ClientError\nfrom botocore.config import Config\nfrom boto3.dynamodb.conditions import Key\n\n\ndef extractValue(dict, param, key):\n if param in dict.keys():\n paramDict = dict[param]\n if key in paramDict.keys():\n return paramDict[key]\n else:\n return \"N/A\"\n else:\n return \"N/A\"\n\ndef createDict(dynamoItem):\n dynamoItem = json.loads(dynamoItem)\n dynamoItem = dynamoItem['Item']\n \n newDict = {\n 'cogUkId': extractValue(dynamoItem, 'cogUkId', 'S'),\n 'runMetaData': extractValue(dynamoItem, 'runMetaData', 'S'),\n 'consensusFastaHash': extractValue(dynamoItem, 'consensusFastaHash', 'S'),\n 'runCompleteDate': extractValue(dynamoItem, 'runCompleteDate', 'N'),\n 'lastChangedDate': extractValue(dynamoItem, 'lastChangedDate', 'N'),\n 'run' : extractValue(dynamoItem, 'run','N'),\n 'lane': extractValue(dynamoItem, 'lane', 'N'),\n 'tag' : extractValue(dynamoItem, 'tag', 'N')\n }\n\n return newDict\n\ndef main():\n exportArn = os.getenv(\"EXPORT_ARN\")\n s3Prefix = os.getenv(\"S3_PREFIX\")\n heronBucketName = os.getenv(\"HERON_BUCKET\") \n exportFolder = os.path.basename(exportArn)\n\n exportManifestS3Key = f\"{s3Prefix}/AWSDynamoDB/{exportFolder}/manifest-files.json\"\n exportManifestLocalPath = \"/tmp/manifest.json\"\n concatenatedLocalFilePath = \"/tmp/concatenated.csv\"\n concatenatedFileS3Key = f\"{s3Prefix}/AWSDynamoDB/{exportFolder}/exported.csv\"\n \n print(f\"exportManifestS3Key: {exportManifestS3Key}\")\n print(f\"concatenatedFileS3Key: {concatenatedFileS3Key}\")\n\n\n #download manifest file\n s3 = boto3.resource('s3', region_name='eu-west-1')\n bucket = s3.Bucket(heronBucketName)\n\n bucket.download_file(exportManifestS3Key, exportManifestLocalPath)\n\n with open(exportManifestLocalPath) as file:\n manifestFiles = file.readlines()\n\n allDicts = []\n for manifestLine in manifestFiles:\n manifestItem = json.loads(manifestLine)\n dataFileKey = manifestItem['dataFileS3Key']\n\n # Download and unzip file\n localDataFilePathZipped = f\"/tmp/{os.path.basename(dataFileKey)}\"\n localDataFilePathUnZipped, ex = os.path.splitext(localDataFilePathZipped)\n \n bucket.download_file(dataFileKey, localDataFilePathZipped)\n with gzip.open(localDataFilePathZipped, 'rb') as f_in:\n with open(localDataFilePathUnZipped, 'wb') as f_out:\n shutil.copyfileobj(f_in, f_out)\n\n\n with open(localDataFilePathUnZipped) as f:\n dynamoLines = f.readlines()\n\n frames = [createDict(f) for f in dynamoLines if f != '\\n']\n allDicts.extend(frames)\n\n # Save the resulting dataframe back into S3\n exportDf = pd.DataFrame(allDicts)\n exportDf.to_csv(concatenatedLocalFilePath, index=False)\n bucket.upload_file(concatenatedLocalFilePath, concatenatedFileS3Key)\n\n\nif __name__ == '__main__':\n main()\n\n print(\"Finished\")" ]
[ [ "pandas.DataFrame" ] ]
ntucllab/imbalanced-DL
[ "726e45ffb7a567d57a19688f1410e6b40d14f3df" ]
[ "imbalanceddl/net/network.py" ]
[ "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.nn import Parameter\nimport imbalanceddl.net as backbone\n\nmodel_names = sorted(name for name in backbone.__dict__\n if name.islower() and not name.startswith(\"__\")\n and callable(backbone.__dict__[name]))\n\n\nclass NormedLinear(nn.Module):\n def __init__(self, in_features, out_features):\n super(NormedLinear, self).__init__()\n self.weight = Parameter(torch.Tensor(in_features, out_features))\n self.weight.data.uniform_(-1, 1).renorm_(2, 1, 1e-5).mul_(1e5)\n\n def forward(self, x):\n out = F.normalize(x, dim=1).mm(F.normalize(self.weight, dim=0))\n return out\n\n\nclass Network(nn.Module):\n def __init__(self, cfg):\n super(Network, self).__init__()\n # config\n self.cfg = cfg\n self.num_classes = self._get_num_classes()\n self.feature_len = self._get_feature_len()\n self.backbone = self._get_backbone()\n self.classifier = self._get_classifier()\n\n def forward(self, x, **kwargs):\n hidden = self.backbone(x)\n out = self.classifier(hidden)\n return out, hidden\n\n def _get_feature_len(self):\n if self.cfg.backbone == 'resnet32':\n return 64\n elif self.cfg.backbone == 'resnet18':\n return 512\n else:\n raise ValueError(\"[Warning] Backbone not supported !\")\n\n def _get_num_classes(self):\n if self.cfg.dataset == 'cifar10' or self.cfg.dataset == 'cinic10' \\\n or self.cfg.dataset == 'svhn10':\n return 10\n elif self.cfg.dataset == 'cifar100':\n return 100\n elif self.cfg.dataset == 'tiny200':\n return 200\n else:\n raise NotImplementedError\n\n def _get_backbone(self):\n if self.cfg.backbone is not None:\n print(\"=> Initializing backbone : {}\".format(self.cfg.backbone))\n my_backbone = backbone.__dict__[self.cfg.backbone]()\n return my_backbone\n else:\n raise ValueError(\"=> No backbone is specified !\")\n\n def _get_classifier(self):\n if self.cfg.classifier is not None:\n if self.cfg.strategy == 'LDAM_DRW':\n print(\"=> Due to LDAM, change classifier to \\\n cosine similarity classifier !\")\n self.cfg.classifier = 'cosine_similarity_classifier'\n print(\"=> Initializing classifier: {}\".format(self.cfg.classifier))\n if self.cfg.classifier == 'dot_product_classifier':\n return nn.Linear(self.feature_len,\n self.num_classes,\n bias=False)\n elif self.cfg.classifier == 'cosine_similarity_classifier':\n return NormedLinear(self.feature_len, self.num_classes)\n else:\n raise NotImplementedError\n else:\n raise ValueError(\"=> No classifier is specified !\")\n\n\ndef build_model(cfg):\n model = Network(cfg)\n\n if cfg.gpu is not None:\n print(\"=> Use GPU {} for training\".format(cfg.gpu))\n torch.cuda.set_device(cfg.gpu)\n model = model.cuda(cfg.gpu)\n else:\n print(\"=> Use DataParallel for training\")\n model = torch.nn.DataParallel(model).cuda()\n return model\n" ]
[ [ "torch.nn.Linear", "torch.nn.functional.normalize", "torch.cuda.set_device", "torch.Tensor", "torch.nn.DataParallel" ] ]
anvelezec/TensorFlowTTS
[ "ec2bbd4ebc20fc78b28ae718fab5a6504dad15d1" ]
[ "test/test_tacotron2.py" ]
[ "# -*- coding: utf-8 -*-\n# Copyright 2020 Minh Nguyen (@dathudeptrai)\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nimport os\nimport time\nimport yaml\n\nimport numpy as np\nimport pytest\nimport tensorflow as tf\n\nfrom tensorflow_tts.configs import Tacotron2Config\nfrom tensorflow_tts.models import TFTacotron2\nfrom tensorflow_tts.utils import return_strategy\n\nfrom examples_tts.tacotron2.train_tacotron2 import Tacotron2Trainer\n\nos.environ[\"CUDA_VISIBLE_DEVICES\"] = \"-1\"\n\nlogging.basicConfig(\n level=logging.WARNING,\n format=\"%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s\",\n)\n\n\[email protected](\n \"var_train_expr, config_path\",\n [\n (\"embeddings|decoder_cell\", \"./examples_tts/tacotron2/conf/tacotron2.v1.yaml\"),\n (None, \"./examples_tts/tacotron2/conf/tacotron2.v1.yaml\"),\n (\n \"embeddings|decoder_cell\",\n \"./examples_tts/tacotron2/conf/tacotron2.baker.v1.yaml\",\n ),\n (\"embeddings|decoder_cell\", \"./examples_tts/tacotron2/conf/tacotron2.kss.v1.yaml\"),\n ],\n)\ndef test_tacotron2_train_some_layers(var_train_expr, config_path):\n config = Tacotron2Config(n_speakers=5, reduction_factor=1)\n model = TFTacotron2(config, training=True)\n model._build()\n optimizer = tf.keras.optimizers.Adam(lr=0.001)\n\n with open(config_path) as f:\n config = yaml.load(f, Loader=yaml.Loader)\n\n config.update({\"outdir\": \"./\"})\n config.update({\"var_train_expr\": var_train_expr})\n\n STRATEGY = return_strategy()\n\n trainer = Tacotron2Trainer(\n config=config, strategy=STRATEGY, steps=0, epochs=0, is_mixed_precision=False,\n )\n trainer.compile(model, optimizer)\n\n len_trainable_vars = len(trainer._trainable_variables)\n all_trainable_vars = len(model.trainable_variables)\n\n if var_train_expr is None:\n tf.debugging.assert_equal(len_trainable_vars, all_trainable_vars)\n else:\n tf.debugging.assert_less(len_trainable_vars, all_trainable_vars)\n\n\[email protected](\n \"n_speakers, n_chars, max_input_length, max_mel_length, batch_size\",\n [(2, 15, 25, 50, 2),],\n)\ndef test_tacotron2_trainable(\n n_speakers, n_chars, max_input_length, max_mel_length, batch_size\n):\n config = Tacotron2Config(n_speakers=n_speakers, reduction_factor=1)\n model = TFTacotron2(config, training=True)\n # model._build()\n\n # fake input\n input_ids = tf.random.uniform(\n [batch_size, max_input_length], maxval=n_chars, dtype=tf.int32\n )\n speaker_ids = tf.convert_to_tensor([0] * batch_size, tf.int32)\n mel_gts = tf.random.uniform(shape=[batch_size, max_mel_length, 80])\n mel_lengths = np.random.randint(\n max_mel_length, high=max_mel_length + 1, size=[batch_size]\n )\n mel_lengths[-1] = max_mel_length\n mel_lengths = tf.convert_to_tensor(mel_lengths, dtype=tf.int32)\n\n stop_tokens = np.zeros((batch_size, max_mel_length), np.float32)\n stop_tokens = tf.convert_to_tensor(stop_tokens)\n\n optimizer = tf.keras.optimizers.Adam(lr=0.001)\n\n binary_crossentropy = tf.keras.losses.BinaryCrossentropy(from_logits=True)\n\n 
@tf.function(experimental_relax_shapes=True)\n def one_step_training(input_ids, speaker_ids, mel_gts, mel_lengths):\n with tf.GradientTape() as tape:\n mel_preds, post_mel_preds, stop_preds, alignment_history = model(\n input_ids,\n tf.constant([max_input_length, max_input_length]),\n speaker_ids,\n mel_gts,\n mel_lengths,\n training=True,\n )\n loss_before = tf.keras.losses.MeanSquaredError()(mel_gts, mel_preds)\n loss_after = tf.keras.losses.MeanSquaredError()(mel_gts, post_mel_preds)\n\n stop_gts = tf.expand_dims(\n tf.range(tf.reduce_max(mel_lengths), dtype=tf.int32), 0\n ) # [1, max_len]\n stop_gts = tf.tile(stop_gts, [tf.shape(mel_lengths)[0], 1]) # [B, max_len]\n stop_gts = tf.cast(\n tf.math.greater_equal(stop_gts, tf.expand_dims(mel_lengths, 1) - 1),\n tf.float32,\n )\n\n # calculate stop_token loss\n stop_token_loss = binary_crossentropy(stop_gts, stop_preds)\n\n loss = stop_token_loss + loss_before + loss_after\n\n gradients = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n return loss, alignment_history\n\n for i in range(2):\n if i == 1:\n start = time.time()\n loss, alignment_history = one_step_training(\n input_ids, speaker_ids, mel_gts, mel_lengths\n )\n print(f\" > loss: {loss}\")\n total_runtime = time.time() - start\n print(f\" > Total run-time: {total_runtime}\")\n print(f\" > Avg run-time: {total_runtime/10}\")\n" ]
[ [ "tensorflow.convert_to_tensor", "tensorflow.shape", "tensorflow.GradientTape", "numpy.zeros", "tensorflow.expand_dims", "tensorflow.keras.losses.MeanSquaredError", "tensorflow.random.uniform", "tensorflow.function", "tensorflow.debugging.assert_less", "tensorflow.constant", "tensorflow.reduce_max", "numpy.random.randint", "tensorflow.debugging.assert_equal", "tensorflow.keras.losses.BinaryCrossentropy", "tensorflow.keras.optimizers.Adam" ] ]
Ervin66/geopandas
[ "2a4601642d300433bb33224a6b5e4eda27eab783" ]
[ "geopandas/tests/test_plotting.py" ]
[ "import itertools\nimport warnings\n\nimport numpy as np\nimport pandas as pd\n\nfrom shapely.affinity import rotate\nfrom shapely.geometry import (\n MultiPolygon,\n Polygon,\n LineString,\n LinearRing,\n Point,\n MultiPoint,\n MultiLineString,\n GeometryCollection,\n)\n\n\nfrom geopandas import GeoDataFrame, GeoSeries, read_file\nfrom geopandas.datasets import get_path\n\nimport pytest\n\nmatplotlib = pytest.importorskip(\"matplotlib\")\nmatplotlib.use(\"Agg\")\nimport matplotlib.pyplot as plt # noqa\n\n\[email protected](autouse=True)\ndef close_figures(request):\n yield\n plt.close(\"all\")\n\n\ntry:\n cycle = matplotlib.rcParams[\"axes.prop_cycle\"].by_key()\n MPL_DFT_COLOR = cycle[\"color\"][0]\nexcept KeyError:\n MPL_DFT_COLOR = matplotlib.rcParams[\"axes.color_cycle\"][0]\n\n\nclass TestPointPlotting:\n def setup_method(self):\n self.N = 10\n self.points = GeoSeries(Point(i, i) for i in range(self.N))\n\n values = np.arange(self.N)\n\n self.df = GeoDataFrame({\"geometry\": self.points, \"values\": values})\n self.df[\"exp\"] = (values * 10) ** 3\n\n multipoint1 = MultiPoint(self.points)\n multipoint2 = rotate(multipoint1, 90)\n self.df2 = GeoDataFrame(\n {\"geometry\": [multipoint1, multipoint2], \"values\": [0, 1]}\n )\n\n def test_figsize(self):\n\n ax = self.points.plot(figsize=(1, 1))\n np.testing.assert_array_equal(ax.figure.get_size_inches(), (1, 1))\n\n ax = self.df.plot(figsize=(1, 1))\n np.testing.assert_array_equal(ax.figure.get_size_inches(), (1, 1))\n\n def test_default_colors(self):\n\n # # without specifying values -> uniform color\n\n # GeoSeries\n ax = self.points.plot()\n _check_colors(\n self.N, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR] * self.N\n )\n\n # GeoDataFrame\n ax = self.df.plot()\n _check_colors(\n self.N, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR] * self.N\n )\n\n # # with specifying values -> different colors for all 10 values\n ax = self.df.plot(column=\"values\")\n cmap = plt.get_cmap()\n expected_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, ax.collections[0].get_facecolors(), expected_colors)\n\n def test_series_color_no_index(self):\n\n # Color order with ordered index\n colors_ord = pd.Series([\"a\", \"b\", \"c\", \"a\", \"b\", \"c\", \"a\", \"b\", \"c\", \"a\"])\n\n # Plot using Series as color\n ax1 = self.df.plot(colors_ord)\n\n # Correct answer: Add as column to df and plot\n self.df[\"colors_ord\"] = colors_ord\n ax2 = self.df.plot(\"colors_ord\")\n\n # Confirm out-of-order index re-sorted\n point_colors1 = ax1.collections[0].get_facecolors()\n point_colors2 = ax2.collections[0].get_facecolors()\n np.testing.assert_array_equal(point_colors1[1], point_colors2[1])\n\n def test_series_color_index(self):\n\n # Color order with out-of-order index\n colors_ord = pd.Series(\n [\"a\", \"a\", \"a\", \"a\", \"b\", \"b\", \"b\", \"c\", \"c\", \"c\"],\n index=[0, 3, 6, 9, 1, 4, 7, 2, 5, 8],\n )\n\n # Plot using Series as color\n ax1 = self.df.plot(colors_ord)\n\n # Correct answer: Add as column to df and plot\n self.df[\"colors_ord\"] = colors_ord\n ax2 = self.df.plot(\"colors_ord\")\n\n # Confirm out-of-order index re-sorted\n point_colors1 = ax1.collections[0].get_facecolors()\n point_colors2 = ax2.collections[0].get_facecolors()\n np.testing.assert_array_equal(point_colors1[1], point_colors2[1])\n\n def test_colormap(self):\n\n # without specifying values but cmap specified -> no uniform color\n # but different colors for all points\n\n # GeoSeries\n ax = self.points.plot(cmap=\"RdYlGn\")\n cmap = 
plt.get_cmap(\"RdYlGn\")\n exp_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, ax.collections[0].get_facecolors(), exp_colors)\n\n ax = self.df.plot(cmap=\"RdYlGn\")\n _check_colors(self.N, ax.collections[0].get_facecolors(), exp_colors)\n\n # # with specifying values -> different colors for all 10 values\n ax = self.df.plot(column=\"values\", cmap=\"RdYlGn\")\n cmap = plt.get_cmap(\"RdYlGn\")\n _check_colors(self.N, ax.collections[0].get_facecolors(), exp_colors)\n\n # when using a cmap with specified lut -> limited number of different\n # colors\n ax = self.points.plot(cmap=plt.get_cmap(\"Set1\", lut=5))\n cmap = plt.get_cmap(\"Set1\", lut=5)\n exp_colors = cmap(list(range(5)) * 3)\n _check_colors(self.N, ax.collections[0].get_facecolors(), exp_colors)\n\n def test_single_color(self):\n\n ax = self.points.plot(color=\"green\")\n _check_colors(self.N, ax.collections[0].get_facecolors(), [\"green\"] * self.N)\n\n ax = self.df.plot(color=\"green\")\n _check_colors(self.N, ax.collections[0].get_facecolors(), [\"green\"] * self.N)\n\n # check rgba tuple GH1178\n ax = self.df.plot(color=(0.5, 0.5, 0.5))\n _check_colors(\n self.N, ax.collections[0].get_facecolors(), [(0.5, 0.5, 0.5)] * self.N\n )\n ax = self.df.plot(color=(0.5, 0.5, 0.5, 0.5))\n _check_colors(\n self.N, ax.collections[0].get_facecolors(), [(0.5, 0.5, 0.5, 0.5)] * self.N\n )\n with pytest.raises((ValueError, TypeError)):\n self.df.plot(color=\"not color\")\n\n with warnings.catch_warnings(record=True) as _: # don't print warning\n # 'color' overrides 'column'\n ax = self.df.plot(column=\"values\", color=\"green\")\n _check_colors(\n self.N, ax.collections[0].get_facecolors(), [\"green\"] * self.N\n )\n\n def test_markersize(self):\n\n ax = self.points.plot(markersize=10)\n assert ax.collections[0].get_sizes() == [10]\n\n ax = self.df.plot(markersize=10)\n assert ax.collections[0].get_sizes() == [10]\n\n ax = self.df.plot(column=\"values\", markersize=10)\n assert ax.collections[0].get_sizes() == [10]\n\n ax = self.df.plot(markersize=\"values\")\n assert (ax.collections[0].get_sizes() == self.df[\"values\"]).all()\n\n ax = self.df.plot(column=\"values\", markersize=\"values\")\n assert (ax.collections[0].get_sizes() == self.df[\"values\"]).all()\n\n def test_markerstyle(self):\n ax = self.df2.plot(marker=\"+\")\n expected = _style_to_vertices(\"+\")\n np.testing.assert_array_equal(\n expected, ax.collections[0].get_paths()[0].vertices\n )\n\n def test_style_kwargs(self):\n\n ax = self.points.plot(edgecolors=\"k\")\n assert (ax.collections[0].get_edgecolor() == [0, 0, 0, 1]).all()\n\n def test_style_kwargs_alpha(self):\n ax = self.df.plot(alpha=0.7)\n np.testing.assert_array_equal([0.7], ax.collections[0].get_alpha())\n with pytest.raises(TypeError): # no list allowed for alpha\n ax = self.df.plot(alpha=[0.7, 0.2])\n\n def test_legend(self):\n with warnings.catch_warnings(record=True) as _: # don't print warning\n # legend ignored if color is given.\n ax = self.df.plot(column=\"values\", color=\"green\", legend=True)\n assert len(ax.get_figure().axes) == 1 # no separate legend axis\n\n # legend ignored if no column is given.\n ax = self.df.plot(legend=True)\n assert len(ax.get_figure().axes) == 1 # no separate legend axis\n\n # # Continuous legend\n # the colorbar matches the Point colors\n ax = self.df.plot(column=\"values\", cmap=\"RdYlGn\", legend=True)\n point_colors = ax.collections[0].get_facecolors()\n cbar_colors = ax.get_figure().axes[1].collections[-1].get_facecolors()\n # first point == bottom of 
colorbar\n np.testing.assert_array_equal(point_colors[0], cbar_colors[0])\n # last point == top of colorbar\n np.testing.assert_array_equal(point_colors[-1], cbar_colors[-1])\n\n # # Categorical legend\n # the colorbar matches the Point colors\n ax = self.df.plot(column=\"values\", categorical=True, legend=True)\n point_colors = ax.collections[0].get_facecolors()\n cbar_colors = ax.get_legend().axes.collections[-1].get_facecolors()\n # first point == bottom of colorbar\n np.testing.assert_array_equal(point_colors[0], cbar_colors[0])\n # last point == top of colorbar\n np.testing.assert_array_equal(point_colors[-1], cbar_colors[-1])\n\n # # Normalized legend\n # the colorbar matches the Point colors\n norm = matplotlib.colors.LogNorm(\n vmin=self.df[1:].exp.min(), vmax=self.df[1:].exp.max()\n )\n ax = self.df[1:].plot(column=\"exp\", cmap=\"RdYlGn\", legend=True, norm=norm)\n point_colors = ax.collections[0].get_facecolors()\n cbar_colors = ax.get_figure().axes[1].collections[-1].get_facecolors()\n # first point == bottom of colorbar\n np.testing.assert_array_equal(point_colors[0], cbar_colors[0])\n # last point == top of colorbar\n np.testing.assert_array_equal(point_colors[-1], cbar_colors[-1])\n # colorbar generated proper long transition\n assert cbar_colors.shape == (256, 4)\n\n def test_subplots_norm(self):\n # colors of subplots are the same as for plot (norm is applied)\n cmap = matplotlib.cm.viridis_r\n norm = matplotlib.colors.Normalize(vmin=0, vmax=20)\n ax = self.df.plot(column=\"values\", cmap=cmap, norm=norm)\n actual_colors_orig = ax.collections[0].get_facecolors()\n exp_colors = cmap(np.arange(10) / (20))\n np.testing.assert_array_equal(exp_colors, actual_colors_orig)\n fig, ax = plt.subplots()\n self.df[1:].plot(column=\"values\", ax=ax, norm=norm, cmap=cmap)\n actual_colors_sub = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(actual_colors_orig[1], actual_colors_sub[0])\n\n def test_empty_plot(self):\n s = GeoSeries([])\n with pytest.warns(UserWarning):\n ax = s.plot()\n assert len(ax.collections) == 0\n df = GeoDataFrame([])\n with pytest.warns(UserWarning):\n ax = df.plot()\n assert len(ax.collections) == 0\n\n def test_multipoints(self):\n\n # MultiPoints\n ax = self.df2.plot()\n _check_colors(4, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR] * 4)\n\n ax = self.df2.plot(column=\"values\")\n cmap = plt.get_cmap()\n expected_colors = [cmap(0)] * self.N + [cmap(1)] * self.N\n _check_colors(2, ax.collections[0].get_facecolors(), expected_colors)\n\n ax = self.df2.plot(color=[\"r\", \"b\"])\n # colors are repeated for all components within a MultiPolygon\n _check_colors(2, ax.collections[0].get_facecolors(), [\"r\"] * 10 + [\"b\"] * 10)\n\n def test_multipoints_alpha(self):\n ax = self.df2.plot(alpha=0.7)\n np.testing.assert_array_equal([0.7], ax.collections[0].get_alpha())\n with pytest.raises(TypeError): # no list allowed for alpha\n ax = self.df2.plot(alpha=[0.7, 0.2])\n\n def test_categories(self):\n self.df[\"cats_object\"] = [\"cat1\", \"cat2\"] * 5\n self.df[\"nums\"] = [1, 2] * 5\n self.df[\"singlecat_object\"] = [\"cat2\"] * 10\n self.df[\"cats\"] = pd.Categorical([\"cat1\", \"cat2\"] * 5)\n self.df[\"singlecat\"] = pd.Categorical(\n [\"cat2\"] * 10, categories=[\"cat1\", \"cat2\"]\n )\n self.df[\"cats_ordered\"] = pd.Categorical(\n [\"cat2\", \"cat1\"] * 5, categories=[\"cat2\", \"cat1\"]\n )\n\n ax1 = self.df.plot(\"cats_object\", legend=True)\n ax2 = self.df.plot(\"cats\", legend=True)\n ax3 = self.df.plot(\"singlecat_object\", 
categories=[\"cat1\", \"cat2\"], legend=True)\n ax4 = self.df.plot(\"singlecat\", legend=True)\n ax5 = self.df.plot(\"cats_ordered\", legend=True)\n ax6 = self.df.plot(\"nums\", categories=[1, 2], legend=True)\n\n point_colors1 = ax1.collections[0].get_facecolors()\n for ax in [ax2, ax3, ax4, ax5, ax6]:\n point_colors2 = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(point_colors1[1], point_colors2[1])\n\n legend1 = [x.get_markerfacecolor() for x in ax1.get_legend().get_lines()]\n for ax in [ax2, ax3, ax4, ax5, ax6]:\n legend2 = [x.get_markerfacecolor() for x in ax.get_legend().get_lines()]\n np.testing.assert_array_equal(legend1, legend2)\n\n with pytest.raises(TypeError):\n self.df.plot(column=\"cats_object\", categories=\"non_list\")\n\n with pytest.raises(\n ValueError, match=\"Column contains values not listed in categories.\"\n ):\n self.df.plot(column=\"cats_object\", categories=[\"cat1\"])\n\n with pytest.raises(\n ValueError, match=\"Cannot specify 'categories' when column has\"\n ):\n self.df.plot(column=\"cats\", categories=[\"cat1\"])\n\n def test_misssing(self):\n self.df.loc[0, \"values\"] = np.nan\n ax = self.df.plot(\"values\")\n cmap = plt.get_cmap()\n expected_colors = cmap(np.arange(self.N - 1) / (self.N - 2))\n _check_colors(self.N - 1, ax.collections[0].get_facecolors(), expected_colors)\n\n ax = self.df.plot(\"values\", missing_kwds={\"color\": \"r\"})\n cmap = plt.get_cmap()\n expected_colors = cmap(np.arange(self.N - 1) / (self.N - 2))\n _check_colors(1, ax.collections[1].get_facecolors(), [\"r\"])\n _check_colors(self.N - 1, ax.collections[0].get_facecolors(), expected_colors)\n\n ax = self.df.plot(\n \"values\", missing_kwds={\"color\": \"r\"}, categorical=True, legend=True\n )\n _check_colors(1, ax.collections[1].get_facecolors(), [\"r\"])\n point_colors = ax.collections[0].get_facecolors()\n nan_color = ax.collections[1].get_facecolors()\n leg_colors = ax.get_legend().axes.collections[0].get_facecolors()\n leg_colors1 = ax.get_legend().axes.collections[1].get_facecolors()\n np.testing.assert_array_equal(point_colors[0], leg_colors[0])\n np.testing.assert_array_equal(nan_color[0], leg_colors1[0])\n\n\nclass TestPointZPlotting:\n def setup_method(self):\n self.N = 10\n self.points = GeoSeries(Point(i, i, i) for i in range(self.N))\n values = np.arange(self.N)\n self.df = GeoDataFrame({\"geometry\": self.points, \"values\": values})\n\n def test_plot(self):\n # basic test that points with z coords don't break plotting\n self.df.plot()\n\n\nclass TestLineStringPlotting:\n def setup_method(self):\n self.N = 10\n values = np.arange(self.N)\n self.lines = GeoSeries(\n [LineString([(0, i), (4, i + 0.5), (9, i)]) for i in range(self.N)],\n index=list(\"ABCDEFGHIJ\"),\n )\n self.df = GeoDataFrame({\"geometry\": self.lines, \"values\": values})\n\n multiline1 = MultiLineString(self.lines.loc[\"A\":\"B\"].values)\n multiline2 = MultiLineString(self.lines.loc[\"C\":\"D\"].values)\n self.df2 = GeoDataFrame(\n {\"geometry\": [multiline1, multiline2], \"values\": [0, 1]}\n )\n\n self.linearrings = GeoSeries(\n [LinearRing([(0, i), (4, i + 0.5), (9, i)]) for i in range(self.N)],\n index=list(\"ABCDEFGHIJ\"),\n )\n self.df3 = GeoDataFrame({\"geometry\": self.linearrings, \"values\": values})\n\n def test_single_color(self):\n\n ax = self.lines.plot(color=\"green\")\n _check_colors(self.N, ax.collections[0].get_colors(), [\"green\"] * self.N)\n\n ax = self.df.plot(color=\"green\")\n _check_colors(self.N, ax.collections[0].get_colors(), [\"green\"] * self.N)\n\n 
ax = self.linearrings.plot(color=\"green\")\n _check_colors(self.N, ax.collections[0].get_colors(), [\"green\"] * self.N)\n\n ax = self.df3.plot(color=\"green\")\n _check_colors(self.N, ax.collections[0].get_colors(), [\"green\"] * self.N)\n\n # check rgba tuple GH1178\n ax = self.df.plot(color=(0.5, 0.5, 0.5, 0.5))\n _check_colors(\n self.N, ax.collections[0].get_colors(), [(0.5, 0.5, 0.5, 0.5)] * self.N\n )\n ax = self.df.plot(color=(0.5, 0.5, 0.5, 0.5))\n _check_colors(\n self.N, ax.collections[0].get_colors(), [(0.5, 0.5, 0.5, 0.5)] * self.N\n )\n with pytest.raises((TypeError, ValueError)):\n self.df.plot(color=\"not color\")\n\n with warnings.catch_warnings(record=True) as _: # don't print warning\n # 'color' overrides 'column'\n ax = self.df.plot(column=\"values\", color=\"green\")\n _check_colors(self.N, ax.collections[0].get_colors(), [\"green\"] * self.N)\n\n def test_style_kwargs_linestyle(self):\n # single\n for ax in [\n self.lines.plot(linestyle=\":\", linewidth=1),\n self.df.plot(linestyle=\":\", linewidth=1),\n self.df.plot(column=\"values\", linestyle=\":\", linewidth=1),\n ]:\n assert [(0.0, [1.0, 1.65])] == ax.collections[0].get_linestyle()\n\n # tuple\n ax = self.lines.plot(linestyle=(0, (3, 10, 1, 15)), linewidth=1)\n assert [(0, [3, 10, 1, 15])] == ax.collections[0].get_linestyle()\n\n # multiple\n ls = [(\"dashed\", \"dotted\", \"dashdot\", \"solid\")[k % 4] for k in range(self.N)]\n exp_ls = [_style_to_linestring_onoffseq(st, 1) for st in ls]\n for ax in [\n self.lines.plot(linestyle=ls, linewidth=1),\n self.lines.plot(linestyles=ls, linewidth=1),\n self.df.plot(linestyle=ls, linewidth=1),\n self.df.plot(column=\"values\", linestyle=ls, linewidth=1),\n ]:\n np.testing.assert_array_equal(exp_ls, ax.collections[0].get_linestyle())\n\n def test_style_kwargs_linewidth(self):\n # single\n for ax in [\n self.lines.plot(linewidth=2),\n self.df.plot(linewidth=2),\n self.df.plot(column=\"values\", linewidth=2),\n ]:\n np.testing.assert_array_equal([2], ax.collections[0].get_linewidths())\n\n # multiple\n lw = [(0, 1, 2, 5.5, 10)[k % 5] for k in range(self.N)]\n for ax in [\n self.lines.plot(linewidth=lw),\n self.lines.plot(linewidths=lw),\n self.df.plot(linewidth=lw),\n self.df.plot(column=\"values\", linewidth=lw),\n ]:\n np.testing.assert_array_equal(lw, ax.collections[0].get_linewidths())\n\n def test_style_kwargs_alpha(self):\n ax = self.df.plot(alpha=0.7)\n np.testing.assert_array_equal([0.7], ax.collections[0].get_alpha())\n with pytest.raises(TypeError): # no list allowed for alpha\n ax = self.df.plot(alpha=[0.7, 0.2])\n\n def test_subplots_norm(self):\n # colors of subplots are the same as for plot (norm is applied)\n cmap = matplotlib.cm.viridis_r\n norm = matplotlib.colors.Normalize(vmin=0, vmax=20)\n ax = self.df.plot(column=\"values\", cmap=cmap, norm=norm)\n actual_colors_orig = ax.collections[0].get_edgecolors()\n exp_colors = cmap(np.arange(10) / (20))\n np.testing.assert_array_equal(exp_colors, actual_colors_orig)\n fig, ax = plt.subplots()\n self.df[1:].plot(column=\"values\", ax=ax, norm=norm, cmap=cmap)\n actual_colors_sub = ax.collections[0].get_edgecolors()\n np.testing.assert_array_equal(actual_colors_orig[1], actual_colors_sub[0])\n\n def test_multilinestrings(self):\n\n # MultiLineStrings\n ax = self.df2.plot()\n assert len(ax.collections[0].get_paths()) == 4\n _check_colors(4, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR] * 4)\n\n ax = self.df2.plot(\"values\")\n cmap = plt.get_cmap(lut=2)\n # colors are repeated for all components within a 
MultiLineString\n expected_colors = [cmap(0), cmap(0), cmap(1), cmap(1)]\n _check_colors(4, ax.collections[0].get_facecolors(), expected_colors)\n\n ax = self.df2.plot(color=[\"r\", \"b\"])\n # colors are repeated for all components within a MultiLineString\n _check_colors(4, ax.collections[0].get_facecolors(), [\"r\", \"r\", \"b\", \"b\"])\n\n\nclass TestPolygonPlotting:\n def setup_method(self):\n\n t1 = Polygon([(0, 0), (1, 0), (1, 1)])\n t2 = Polygon([(1, 0), (2, 0), (2, 1)])\n self.polys = GeoSeries([t1, t2], index=list(\"AB\"))\n self.df = GeoDataFrame({\"geometry\": self.polys, \"values\": [0, 1]})\n\n multipoly1 = MultiPolygon([t1, t2])\n multipoly2 = rotate(multipoly1, 180)\n self.df2 = GeoDataFrame(\n {\"geometry\": [multipoly1, multipoly2], \"values\": [0, 1]}\n )\n\n t3 = Polygon([(2, 0), (3, 0), (3, 1)])\n df_nan = GeoDataFrame({\"geometry\": t3, \"values\": [np.nan]})\n self.df3 = self.df.append(df_nan)\n\n def test_single_color(self):\n\n ax = self.polys.plot(color=\"green\")\n _check_colors(2, ax.collections[0].get_facecolors(), [\"green\"] * 2)\n # color only sets facecolor\n _check_colors(2, ax.collections[0].get_edgecolors(), [\"k\"] * 2)\n\n ax = self.df.plot(color=\"green\")\n _check_colors(2, ax.collections[0].get_facecolors(), [\"green\"] * 2)\n _check_colors(2, ax.collections[0].get_edgecolors(), [\"k\"] * 2)\n\n # check rgba tuple GH1178\n ax = self.df.plot(color=(0.5, 0.5, 0.5))\n _check_colors(2, ax.collections[0].get_facecolors(), [(0.5, 0.5, 0.5)] * 2)\n ax = self.df.plot(color=(0.5, 0.5, 0.5, 0.5))\n _check_colors(2, ax.collections[0].get_facecolors(), [(0.5, 0.5, 0.5, 0.5)] * 2)\n with pytest.raises((TypeError, ValueError)):\n self.df.plot(color=\"not color\")\n\n with warnings.catch_warnings(record=True) as _: # don't print warning\n # 'color' overrides 'values'\n ax = self.df.plot(column=\"values\", color=\"green\")\n _check_colors(2, ax.collections[0].get_facecolors(), [\"green\"] * 2)\n\n def test_vmin_vmax(self):\n # when vmin == vmax, all polygons should be the same color\n\n # non-categorical\n ax = self.df.plot(column=\"values\", categorical=False, vmin=0, vmax=0)\n actual_colors = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(actual_colors[0], actual_colors[1])\n\n # categorical\n ax = self.df.plot(column=\"values\", categorical=True, vmin=0, vmax=0)\n actual_colors = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(actual_colors[0], actual_colors[1])\n\n # vmin vmax set correctly for array with NaN (GitHub issue 877)\n ax = self.df3.plot(column=\"values\")\n actual_colors = ax.collections[0].get_facecolors()\n assert np.any(np.not_equal(actual_colors[0], actual_colors[1]))\n\n def test_style_kwargs_color(self):\n\n # facecolor overrides default cmap when color is not set\n ax = self.polys.plot(facecolor=\"k\")\n _check_colors(2, ax.collections[0].get_facecolors(), [\"k\"] * 2)\n\n # facecolor overrides more general-purpose color when both are set\n ax = self.polys.plot(color=\"red\", facecolor=\"k\")\n # TODO with new implementation, color overrides facecolor\n # _check_colors(2, ax.collections[0], ['k']*2, alpha=0.5)\n\n # edgecolor\n ax = self.polys.plot(edgecolor=\"red\")\n np.testing.assert_array_equal(\n [(1, 0, 0, 1)], ax.collections[0].get_edgecolors()\n )\n\n ax = self.df.plot(\"values\", edgecolor=\"red\")\n np.testing.assert_array_equal(\n [(1, 0, 0, 1)], ax.collections[0].get_edgecolors()\n )\n\n # alpha sets both edge and face\n ax = self.polys.plot(facecolor=\"g\", edgecolor=\"r\", alpha=0.4)\n 
_check_colors(2, ax.collections[0].get_facecolors(), [\"g\"] * 2, alpha=0.4)\n _check_colors(2, ax.collections[0].get_edgecolors(), [\"r\"] * 2, alpha=0.4)\n\n # check rgba tuple GH1178 for face and edge\n ax = self.df.plot(facecolor=(0.5, 0.5, 0.5), edgecolor=(0.4, 0.5, 0.6))\n _check_colors(2, ax.collections[0].get_facecolors(), [(0.5, 0.5, 0.5)] * 2)\n _check_colors(2, ax.collections[0].get_edgecolors(), [(0.4, 0.5, 0.6)] * 2)\n\n ax = self.df.plot(\n facecolor=(0.5, 0.5, 0.5, 0.5), edgecolor=(0.4, 0.5, 0.6, 0.5)\n )\n _check_colors(2, ax.collections[0].get_facecolors(), [(0.5, 0.5, 0.5, 0.5)] * 2)\n _check_colors(2, ax.collections[0].get_edgecolors(), [(0.4, 0.5, 0.6, 0.5)] * 2)\n\n def test_style_kwargs_linestyle(self):\n # single\n ax = self.df.plot(linestyle=\":\", linewidth=1)\n assert [(0.0, [1.0, 1.65])] == ax.collections[0].get_linestyle()\n\n # tuple\n ax = self.df.plot(linestyle=(0, (3, 10, 1, 15)), linewidth=1)\n assert [(0, [3, 10, 1, 15])] == ax.collections[0].get_linestyle()\n\n # multiple\n ls = [\"dashed\", \"dotted\"]\n exp_ls = [_style_to_linestring_onoffseq(st, 1) for st in ls]\n for ax in [\n self.df.plot(linestyle=ls, linewidth=1),\n self.df.plot(linestyles=ls, linewidth=1),\n ]:\n assert exp_ls == ax.collections[0].get_linestyle()\n\n def test_style_kwargs_linewidth(self):\n # single\n ax = self.df.plot(linewidth=2)\n np.testing.assert_array_equal([2], ax.collections[0].get_linewidths())\n # multiple\n for ax in [self.df.plot(linewidth=[2, 4]), self.df.plot(linewidths=[2, 4])]:\n np.testing.assert_array_equal([2, 4], ax.collections[0].get_linewidths())\n\n # alpha\n ax = self.df.plot(alpha=0.7)\n np.testing.assert_array_equal([0.7], ax.collections[0].get_alpha())\n with pytest.raises(TypeError): # no list allowed for alpha\n ax = self.df.plot(alpha=[0.7, 0.2])\n\n def test_legend_kwargs(self):\n\n ax = self.df.plot(\n column=\"values\",\n categorical=True,\n legend=True,\n legend_kwds={\"frameon\": False},\n )\n assert ax.get_legend().get_frame_on() is False\n\n def test_colorbar_kwargs(self):\n # Test if kwargs are passed to colorbar\n\n label_txt = \"colorbar test\"\n\n ax = self.df.plot(\n column=\"values\",\n categorical=False,\n legend=True,\n legend_kwds={\"label\": label_txt},\n )\n\n assert ax.get_figure().axes[1].get_ylabel() == label_txt\n\n ax = self.df.plot(\n column=\"values\",\n categorical=False,\n legend=True,\n legend_kwds={\"label\": label_txt, \"orientation\": \"horizontal\"},\n )\n\n assert ax.get_figure().axes[1].get_xlabel() == label_txt\n\n def test_fmt_ignore(self):\n # test if fmt is removed if scheme is not passed (it would raise Error)\n # GH #1253\n\n self.df.plot(\n column=\"values\",\n categorical=True,\n legend=True,\n legend_kwds={\"fmt\": \"{:.0f}\"},\n )\n\n self.df.plot(column=\"values\", legend=True, legend_kwds={\"fmt\": \"{:.0f}\"})\n\n def test_multipolygons_color(self):\n\n # MultiPolygons\n ax = self.df2.plot()\n assert len(ax.collections[0].get_paths()) == 4\n _check_colors(4, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR] * 4)\n\n ax = self.df2.plot(\"values\")\n cmap = plt.get_cmap(lut=2)\n # colors are repeated for all components within a MultiPolygon\n expected_colors = [cmap(0), cmap(0), cmap(1), cmap(1)]\n _check_colors(4, ax.collections[0].get_facecolors(), expected_colors)\n\n ax = self.df2.plot(color=[\"r\", \"b\"])\n # colors are repeated for all components within a MultiPolygon\n _check_colors(4, ax.collections[0].get_facecolors(), [\"r\", \"r\", \"b\", \"b\"])\n\n def test_multipolygons_linestyle(self):\n # 
single\n ax = self.df2.plot(linestyle=\":\", linewidth=1)\n assert [(0.0, [1.0, 1.65])] == ax.collections[0].get_linestyle()\n\n # tuple\n ax = self.df2.plot(linestyle=(0, (3, 10, 1, 15)), linewidth=1)\n assert [(0, [3, 10, 1, 15])] == ax.collections[0].get_linestyle()\n\n # multiple\n ls = [\"dashed\", \"dotted\"]\n exp_ls = [_style_to_linestring_onoffseq(st, 1) for st in ls for i in range(2)]\n for ax in [\n self.df2.plot(linestyle=ls, linewidth=1),\n self.df2.plot(linestyles=ls, linewidth=1),\n ]:\n assert exp_ls == ax.collections[0].get_linestyle()\n\n def test_multipolygons_linewidth(self):\n # single\n ax = self.df2.plot(linewidth=2)\n np.testing.assert_array_equal([2], ax.collections[0].get_linewidths())\n\n # multiple\n for ax in [self.df2.plot(linewidth=[2, 4]), self.df2.plot(linewidths=[2, 4])]:\n np.testing.assert_array_equal(\n [2, 2, 4, 4], ax.collections[0].get_linewidths()\n )\n\n def test_multipolygons_alpha(self):\n ax = self.df2.plot(alpha=0.7)\n np.testing.assert_array_equal([0.7], ax.collections[0].get_alpha())\n with pytest.raises(TypeError): # no list allowed for alpha\n ax = self.df2.plot(alpha=[0.7, 0.2])\n\n def test_subplots_norm(self):\n # colors of subplots are the same as for plot (norm is applied)\n cmap = matplotlib.cm.viridis_r\n norm = matplotlib.colors.Normalize(vmin=0, vmax=10)\n ax = self.df.plot(column=\"values\", cmap=cmap, norm=norm)\n actual_colors_orig = ax.collections[0].get_facecolors()\n exp_colors = cmap(np.arange(2) / (10))\n np.testing.assert_array_equal(exp_colors, actual_colors_orig)\n fig, ax = plt.subplots()\n self.df[1:].plot(column=\"values\", ax=ax, norm=norm, cmap=cmap)\n actual_colors_sub = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(actual_colors_orig[1], actual_colors_sub[0])\n\n\nclass TestPolygonZPlotting:\n def setup_method(self):\n\n t1 = Polygon([(0, 0, 0), (1, 0, 0), (1, 1, 1)])\n t2 = Polygon([(1, 0, 0), (2, 0, 0), (2, 1, 1)])\n self.polys = GeoSeries([t1, t2], index=list(\"AB\"))\n self.df = GeoDataFrame({\"geometry\": self.polys, \"values\": [0, 1]})\n\n multipoly1 = MultiPolygon([t1, t2])\n multipoly2 = rotate(multipoly1, 180)\n self.df2 = GeoDataFrame(\n {\"geometry\": [multipoly1, multipoly2], \"values\": [0, 1]}\n )\n\n def test_plot(self):\n # basic test that points with z coords don't break plotting\n self.df.plot()\n\n\nclass TestGeometryCollectionPlotting:\n def setup_method(self):\n coll1 = GeometryCollection(\n [\n Polygon([(1, 0), (2, 0), (2, 1)]),\n MultiLineString([((0.5, 0.5), (1, 1)), ((1, 0.5), (1.5, 1))]),\n ]\n )\n coll2 = GeometryCollection(\n [Point(0.75, 0.25), Polygon([(2, 2), (3, 2), (2, 3)])]\n )\n\n self.series = GeoSeries([coll1, coll2])\n self.df = GeoDataFrame({\"geometry\": self.series, \"values\": [1, 2]})\n\n def test_colors(self):\n # default uniform color\n ax = self.series.plot()\n _check_colors(1, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR]) # poly\n _check_colors(2, ax.collections[1].get_edgecolors(), [MPL_DFT_COLOR]) # line\n _check_colors(2, ax.collections[2].get_facecolors(), [MPL_DFT_COLOR]) # point\n\n def test_values(self):\n ax = self.df.plot(\"values\")\n cmap = plt.get_cmap()\n exp_colors = cmap(np.arange(2) / 1)\n _check_colors(1, ax.collections[0].get_facecolors(), exp_colors) # poly\n _check_colors(2, ax.collections[1].get_edgecolors(), [exp_colors[0]]) # line\n _check_colors(2, ax.collections[2].get_facecolors(), [exp_colors[1]]) # point\n\n\nclass TestNonuniformGeometryPlotting:\n def setup_method(self):\n pytest.importorskip(\"matplotlib\", 
\"1.5.0\")\n\n poly = Polygon([(1, 0), (2, 0), (2, 1)])\n line = LineString([(0.5, 0.5), (1, 1), (1, 0.5), (1.5, 1)])\n point = Point(0.75, 0.25)\n self.series = GeoSeries([poly, line, point])\n self.df = GeoDataFrame({\"geometry\": self.series, \"values\": [1, 2, 3]})\n\n def test_colors(self):\n # default uniform color\n ax = self.series.plot()\n _check_colors(1, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR])\n _check_colors(1, ax.collections[1].get_edgecolors(), [MPL_DFT_COLOR])\n _check_colors(1, ax.collections[2].get_facecolors(), [MPL_DFT_COLOR])\n\n # colormap: different colors\n ax = self.series.plot(cmap=\"RdYlGn\")\n cmap = plt.get_cmap(\"RdYlGn\")\n exp_colors = cmap(np.arange(3) / (3 - 1))\n _check_colors(1, ax.collections[0].get_facecolors(), [exp_colors[0]])\n _check_colors(1, ax.collections[1].get_edgecolors(), [exp_colors[1]])\n _check_colors(1, ax.collections[2].get_facecolors(), [exp_colors[2]])\n\n def test_style_kwargs(self):\n ax = self.series.plot(markersize=10)\n assert ax.collections[2].get_sizes() == [10]\n ax = self.df.plot(markersize=10)\n assert ax.collections[2].get_sizes() == [10]\n\n def test_style_kwargs_linestyle(self):\n # single\n for ax in [\n self.series.plot(linestyle=\":\", linewidth=1),\n self.df.plot(linestyle=\":\", linewidth=1),\n ]:\n assert [(0.0, [1.0, 1.65])] == ax.collections[0].get_linestyle()\n\n # tuple\n ax = self.series.plot(linestyle=(0, (3, 10, 1, 15)), linewidth=1)\n assert [(0, [3, 10, 1, 15])] == ax.collections[0].get_linestyle()\n\n @pytest.mark.skip(\n reason=\"array-like style_kwds not supported for mixed geometry types (#1379)\"\n )\n def test_style_kwargs_linestyle_listlike(self):\n # multiple\n ls = [\"solid\", \"dotted\", \"dashdot\"]\n exp_ls = [_style_to_linestring_onoffseq(style, 1) for style in ls]\n for ax in [\n self.series.plot(linestyle=ls, linewidth=1),\n self.series.plot(linestyles=ls, linewidth=1),\n self.df.plot(linestyles=ls, linewidth=1),\n ]:\n np.testing.assert_array_equal(exp_ls, ax.collections[0].get_linestyle())\n\n def test_style_kwargs_linewidth(self):\n # single\n ax = self.df.plot(linewidth=2)\n np.testing.assert_array_equal([2], ax.collections[0].get_linewidths())\n\n @pytest.mark.skip(\n reason=\"array-like style_kwds not supported for mixed geometry types (#1379)\"\n )\n def test_style_kwargs_linewidth_listlike(self):\n # multiple\n for ax in [\n self.series.plot(linewidths=[2, 4, 5.5]),\n self.series.plot(linewidths=[2, 4, 5.5]),\n self.df.plot(linewidths=[2, 4, 5.5]),\n ]:\n np.testing.assert_array_equal(\n [2, 4, 5.5], ax.collections[0].get_linewidths()\n )\n\n def test_style_kwargs_alpha(self):\n ax = self.df.plot(alpha=0.7)\n np.testing.assert_array_equal([0.7], ax.collections[0].get_alpha())\n with pytest.raises(TypeError): # no list allowed for alpha\n ax = self.df.plot(alpha=[0.7, 0.2, 0.9])\n\n\nclass TestGeographicAspect:\n def setup_class(self):\n pth = get_path(\"naturalearth_lowres\")\n df = read_file(pth)\n self.north = df.loc[df.continent == \"North America\"]\n self.north_proj = self.north.to_crs(\"ESRI:102008\")\n bounds = self.north.total_bounds\n y_coord = np.mean([bounds[1], bounds[3]])\n self.exp = 1 / np.cos(y_coord * np.pi / 180)\n\n def test_auto(self):\n ax = self.north.geometry.plot()\n assert ax.get_aspect() == self.exp\n ax2 = self.north_proj.geometry.plot()\n assert ax2.get_aspect() in [\"equal\", 1.0]\n ax = self.north.plot()\n assert ax.get_aspect() == self.exp\n ax2 = self.north_proj.plot()\n assert ax2.get_aspect() in [\"equal\", 1.0]\n ax3 = 
self.north.plot(\"pop_est\")\n assert ax3.get_aspect() == self.exp\n ax4 = self.north_proj.plot(\"pop_est\")\n assert ax4.get_aspect() in [\"equal\", 1.0]\n\n def test_manual(self):\n ax = self.north.geometry.plot(aspect=\"equal\")\n assert ax.get_aspect() in [\"equal\", 1.0]\n self.north.geometry.plot(ax=ax, aspect=None)\n assert ax.get_aspect() in [\"equal\", 1.0]\n ax2 = self.north.geometry.plot(aspect=0.5)\n assert ax2.get_aspect() == 0.5\n self.north.geometry.plot(ax=ax2, aspect=None)\n assert ax2.get_aspect() == 0.5\n ax3 = self.north_proj.geometry.plot(aspect=0.5)\n assert ax3.get_aspect() == 0.5\n self.north_proj.geometry.plot(ax=ax3, aspect=None)\n assert ax3.get_aspect() == 0.5\n ax = self.north.plot(aspect=\"equal\")\n assert ax.get_aspect() in [\"equal\", 1.0]\n self.north.plot(ax=ax, aspect=None)\n assert ax.get_aspect() in [\"equal\", 1.0]\n ax2 = self.north.plot(aspect=0.5)\n assert ax2.get_aspect() == 0.5\n self.north.plot(ax=ax2, aspect=None)\n assert ax2.get_aspect() == 0.5\n ax3 = self.north_proj.plot(aspect=0.5)\n assert ax3.get_aspect() == 0.5\n self.north_proj.plot(ax=ax3, aspect=None)\n assert ax3.get_aspect() == 0.5\n ax = self.north.plot(\"pop_est\", aspect=\"equal\")\n assert ax.get_aspect() in [\"equal\", 1.0]\n self.north.plot(\"pop_est\", ax=ax, aspect=None)\n assert ax.get_aspect() in [\"equal\", 1.0]\n ax2 = self.north.plot(\"pop_est\", aspect=0.5)\n assert ax2.get_aspect() == 0.5\n self.north.plot(\"pop_est\", ax=ax2, aspect=None)\n assert ax2.get_aspect() == 0.5\n ax3 = self.north_proj.plot(\"pop_est\", aspect=0.5)\n assert ax3.get_aspect() == 0.5\n self.north_proj.plot(\"pop_est\", ax=ax3, aspect=None)\n assert ax3.get_aspect() == 0.5\n\n\nclass TestMapclassifyPlotting:\n @classmethod\n def setup_class(cls):\n try:\n import mapclassify # noqa\n except ImportError:\n pytest.importorskip(\"mapclassify\")\n cls.classifiers = list(mapclassify.classifiers.CLASSIFIERS)\n cls.classifiers.remove(\"UserDefined\")\n pth = get_path(\"naturalearth_lowres\")\n cls.df = read_file(pth)\n cls.df[\"NEGATIVES\"] = np.linspace(-10, 10, len(cls.df.index))\n\n def test_legend(self):\n with warnings.catch_warnings(record=True) as _: # don't print warning\n # warning coming from scipy.stats\n ax = self.df.plot(\n column=\"pop_est\", scheme=\"QUANTILES\", k=3, cmap=\"OrRd\", legend=True\n )\n labels = [t.get_text() for t in ax.get_legend().get_texts()]\n expected = [\n u\"[ 140.00, 5217064.00]\",\n u\"( 5217064.00, 19532732.33]\",\n u\"( 19532732.33, 1379302771.00]\",\n ]\n assert labels == expected\n\n def test_bin_labels(self):\n ax = self.df.plot(\n column=\"pop_est\",\n scheme=\"QUANTILES\",\n k=3,\n cmap=\"OrRd\",\n legend=True,\n legend_kwds={\"labels\": [\"foo\", \"bar\", \"baz\"]},\n )\n labels = [t.get_text() for t in ax.get_legend().get_texts()]\n expected = [\"foo\", \"bar\", \"baz\"]\n assert labels == expected\n\n def test_invalid_labels_length(self):\n with pytest.raises(ValueError):\n self.df.plot(\n column=\"pop_est\",\n scheme=\"QUANTILES\",\n k=3,\n cmap=\"OrRd\",\n legend=True,\n legend_kwds={\"labels\": [\"foo\", \"bar\"]},\n )\n\n def test_negative_legend(self):\n ax = self.df.plot(\n column=\"NEGATIVES\", scheme=\"FISHER_JENKS\", k=3, cmap=\"OrRd\", legend=True\n )\n labels = [t.get_text() for t in ax.get_legend().get_texts()]\n expected = [u\"[-10.00, -3.41]\", u\"( -3.41, 3.30]\", u\"( 3.30, 10.00]\"]\n assert labels == expected\n\n def test_fmt(self):\n ax = self.df.plot(\n column=\"NEGATIVES\",\n scheme=\"FISHER_JENKS\",\n k=3,\n cmap=\"OrRd\",\n 
legend=True,\n legend_kwds={\"fmt\": \"{:.0f}\"},\n )\n labels = [t.get_text() for t in ax.get_legend().get_texts()]\n expected = [u\"[-10, -3]\", u\"( -3, 3]\", u\"( 3, 10]\"]\n assert labels == expected\n\n @pytest.mark.parametrize(\"scheme\", [\"FISHER_JENKS\", \"FISHERJENKS\"])\n def test_scheme_name_compat(self, scheme):\n ax = self.df.plot(column=\"NEGATIVES\", scheme=scheme, k=3, legend=True)\n assert len(ax.get_legend().get_texts()) == 3\n\n def test_schemes(self):\n # test if all available classifiers pass\n for scheme in self.classifiers:\n self.df.plot(column=\"pop_est\", scheme=scheme, legend=True)\n\n def test_classification_kwds(self):\n ax = self.df.plot(\n column=\"pop_est\",\n scheme=\"percentiles\",\n k=3,\n classification_kwds={\"pct\": [50, 100]},\n cmap=\"OrRd\",\n legend=True,\n )\n labels = [t.get_text() for t in ax.get_legend().get_texts()]\n expected = [\"[ 140.00, 9961396.00]\", \"( 9961396.00, 1379302771.00]\"]\n assert labels == expected\n\n def test_invalid_scheme(self):\n with pytest.raises(ValueError):\n scheme = \"invalid_scheme_*#&)(*#\"\n self.df.plot(\n column=\"gdp_md_est\", scheme=scheme, k=3, cmap=\"OrRd\", legend=True\n )\n\n def test_cax_legend_passing(self):\n \"\"\"Pass a 'cax' argument to 'df.plot(.)', that is valid only if 'ax' is\n passed as well (if not, a new figure is created ad hoc, and 'cax' is\n ignored)\n \"\"\"\n ax = plt.axes()\n from mpl_toolkits.axes_grid1 import make_axes_locatable\n\n divider = make_axes_locatable(ax)\n cax = divider.append_axes(\"right\", size=\"5%\", pad=0.1)\n with pytest.raises(ValueError):\n ax = self.df.plot(column=\"pop_est\", cmap=\"OrRd\", legend=True, cax=cax)\n\n def test_cax_legend_height(self):\n \"\"\"Pass a cax argument to 'df.plot(.)', the legend location must be\n aligned with those of main plot\n \"\"\"\n # base case\n with warnings.catch_warnings(record=True) as _: # don't print warning\n ax = self.df.plot(column=\"pop_est\", cmap=\"OrRd\", legend=True)\n plot_height = ax.get_figure().get_axes()[0].get_position().height\n legend_height = ax.get_figure().get_axes()[1].get_position().height\n assert abs(plot_height - legend_height) >= 1e-6\n # fix heights with cax argument\n ax2 = plt.axes()\n from mpl_toolkits.axes_grid1 import make_axes_locatable\n\n divider = make_axes_locatable(ax2)\n cax = divider.append_axes(\"right\", size=\"5%\", pad=0.1)\n with warnings.catch_warnings(record=True) as _:\n ax2 = self.df.plot(\n column=\"pop_est\", cmap=\"OrRd\", legend=True, cax=cax, ax=ax2\n )\n plot_height = ax2.get_figure().get_axes()[0].get_position().height\n legend_height = ax2.get_figure().get_axes()[1].get_position().height\n assert abs(plot_height - legend_height) < 1e-6\n\n\nclass TestPlotCollections:\n def setup_method(self):\n self.N = 3\n self.values = np.arange(self.N)\n self.points = GeoSeries(Point(i, i) for i in range(self.N))\n self.lines = GeoSeries(\n [LineString([(0, i), (4, i + 0.5), (9, i)]) for i in range(self.N)]\n )\n self.polygons = GeoSeries(\n [Polygon([(0, i), (4, i + 0.5), (9, i)]) for i in range(self.N)]\n )\n\n def test_points(self):\n # failing with matplotlib 1.4.3 (edge stays black even when specified)\n pytest.importorskip(\"matplotlib\", \"1.5.0\")\n\n from geopandas.plotting import _plot_point_collection, plot_point_collection\n from matplotlib.collections import PathCollection\n\n fig, ax = plt.subplots()\n coll = _plot_point_collection(ax, self.points)\n assert isinstance(coll, PathCollection)\n ax.cla()\n\n # default: single default matplotlib color\n coll = 
_plot_point_collection(ax, self.points)\n _check_colors(self.N, coll.get_facecolors(), [MPL_DFT_COLOR] * self.N)\n # edgecolor depends on matplotlib version\n # _check_colors(self.N, coll.get_edgecolors(), [MPL_DFT_COLOR]*self.N)\n ax.cla()\n\n # specify single other color\n coll = _plot_point_collection(ax, self.points, color=\"g\")\n _check_colors(self.N, coll.get_facecolors(), [\"g\"] * self.N)\n _check_colors(self.N, coll.get_edgecolors(), [\"g\"] * self.N)\n ax.cla()\n\n # specify edgecolor/facecolor\n coll = _plot_point_collection(ax, self.points, facecolor=\"g\", edgecolor=\"r\")\n _check_colors(self.N, coll.get_facecolors(), [\"g\"] * self.N)\n _check_colors(self.N, coll.get_edgecolors(), [\"r\"] * self.N)\n ax.cla()\n\n # list of colors\n coll = _plot_point_collection(ax, self.points, color=[\"r\", \"g\", \"b\"])\n _check_colors(self.N, coll.get_facecolors(), [\"r\", \"g\", \"b\"])\n _check_colors(self.N, coll.get_edgecolors(), [\"r\", \"g\", \"b\"])\n ax.cla()\n\n coll = _plot_point_collection(\n ax,\n self.points,\n color=[(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n _check_colors(\n self.N,\n coll.get_facecolors(),\n [(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n _check_colors(\n self.N,\n coll.get_edgecolors(),\n [(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n ax.cla()\n\n # not a color\n with pytest.raises((TypeError, ValueError)):\n _plot_point_collection(ax, self.points, color=\"not color\")\n\n # check DeprecationWarning\n with pytest.warns(DeprecationWarning):\n plot_point_collection(ax, self.points)\n\n def test_points_values(self):\n from geopandas.plotting import _plot_point_collection\n\n # default colormap\n fig, ax = plt.subplots()\n coll = _plot_point_collection(ax, self.points, self.values)\n fig.canvas.draw_idle()\n cmap = plt.get_cmap()\n expected_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, coll.get_facecolors(), expected_colors)\n # edgecolor depends on matplotlib version\n # _check_colors(self.N, coll.get_edgecolors(), expected_colors)\n\n def test_linestrings(self):\n from geopandas.plotting import (\n _plot_linestring_collection,\n plot_linestring_collection,\n )\n from matplotlib.collections import LineCollection\n\n fig, ax = plt.subplots()\n coll = _plot_linestring_collection(ax, self.lines)\n assert isinstance(coll, LineCollection)\n ax.cla()\n\n # default: single default matplotlib color\n coll = _plot_linestring_collection(ax, self.lines)\n _check_colors(self.N, coll.get_color(), [MPL_DFT_COLOR] * self.N)\n ax.cla()\n\n # specify single other color\n coll = _plot_linestring_collection(ax, self.lines, color=\"g\")\n _check_colors(self.N, coll.get_colors(), [\"g\"] * self.N)\n ax.cla()\n\n # specify edgecolor / facecolor\n coll = _plot_linestring_collection(ax, self.lines, facecolor=\"g\", edgecolor=\"r\")\n _check_colors(self.N, coll.get_facecolors(), [\"g\"] * self.N)\n _check_colors(self.N, coll.get_edgecolors(), [\"r\"] * self.N)\n ax.cla()\n\n # list of colors\n coll = _plot_linestring_collection(ax, self.lines, color=[\"r\", \"g\", \"b\"])\n _check_colors(self.N, coll.get_colors(), [\"r\", \"g\", \"b\"])\n ax.cla()\n\n coll = _plot_linestring_collection(\n ax,\n self.lines,\n color=[(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n _check_colors(\n self.N,\n coll.get_colors(),\n [(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n ax.cla()\n\n # pass through of kwargs\n coll = 
_plot_linestring_collection(ax, self.lines, linestyle=\"--\", linewidth=1)\n exp_ls = _style_to_linestring_onoffseq(\"dashed\", 1)\n res_ls = coll.get_linestyle()[0]\n assert res_ls[0] == exp_ls[0]\n assert res_ls[1] == exp_ls[1]\n ax.cla()\n\n # not a color\n with pytest.raises((TypeError, ValueError)):\n _plot_linestring_collection(ax, self.lines, color=\"not color\")\n # check DeprecationWarning\n with pytest.warns(DeprecationWarning):\n plot_linestring_collection(ax, self.lines)\n\n def test_linestrings_values(self):\n from geopandas.plotting import _plot_linestring_collection\n\n fig, ax = plt.subplots()\n\n # default colormap\n coll = _plot_linestring_collection(ax, self.lines, self.values)\n fig.canvas.draw_idle()\n cmap = plt.get_cmap()\n expected_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, coll.get_color(), expected_colors)\n ax.cla()\n\n # specify colormap\n coll = _plot_linestring_collection(ax, self.lines, self.values, cmap=\"RdBu\")\n fig.canvas.draw_idle()\n cmap = plt.get_cmap(\"RdBu\")\n expected_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, coll.get_color(), expected_colors)\n ax.cla()\n\n # specify vmin/vmax\n coll = _plot_linestring_collection(ax, self.lines, self.values, vmin=3, vmax=5)\n fig.canvas.draw_idle()\n cmap = plt.get_cmap()\n expected_colors = cmap([0])\n _check_colors(self.N, coll.get_color(), expected_colors)\n ax.cla()\n\n def test_polygons(self):\n from geopandas.plotting import _plot_polygon_collection, plot_polygon_collection\n from matplotlib.collections import PatchCollection\n\n fig, ax = plt.subplots()\n coll = _plot_polygon_collection(ax, self.polygons)\n assert isinstance(coll, PatchCollection)\n ax.cla()\n\n # default: single default matplotlib color\n coll = _plot_polygon_collection(ax, self.polygons)\n _check_colors(self.N, coll.get_facecolor(), [MPL_DFT_COLOR] * self.N)\n _check_colors(self.N, coll.get_edgecolor(), [\"k\"] * self.N)\n ax.cla()\n\n # default: color sets both facecolor and edgecolor\n coll = _plot_polygon_collection(ax, self.polygons, color=\"g\")\n _check_colors(self.N, coll.get_facecolor(), [\"g\"] * self.N)\n _check_colors(self.N, coll.get_edgecolor(), [\"g\"] * self.N)\n ax.cla()\n\n # default: color can be passed as a list\n coll = _plot_polygon_collection(ax, self.polygons, color=[\"g\", \"b\", \"r\"])\n _check_colors(self.N, coll.get_facecolor(), [\"g\", \"b\", \"r\"])\n _check_colors(self.N, coll.get_edgecolor(), [\"g\", \"b\", \"r\"])\n ax.cla()\n\n coll = _plot_polygon_collection(\n ax,\n self.polygons,\n color=[(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n _check_colors(\n self.N,\n coll.get_facecolor(),\n [(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n _check_colors(\n self.N,\n coll.get_edgecolor(),\n [(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)],\n )\n ax.cla()\n\n # only setting facecolor keeps default for edgecolor\n coll = _plot_polygon_collection(ax, self.polygons, facecolor=\"g\")\n _check_colors(self.N, coll.get_facecolor(), [\"g\"] * self.N)\n _check_colors(self.N, coll.get_edgecolor(), [\"k\"] * self.N)\n ax.cla()\n\n # custom facecolor and edgecolor\n coll = _plot_polygon_collection(ax, self.polygons, facecolor=\"g\", edgecolor=\"r\")\n _check_colors(self.N, coll.get_facecolor(), [\"g\"] * self.N)\n _check_colors(self.N, coll.get_edgecolor(), [\"r\"] * self.N)\n ax.cla()\n\n # not a color\n with pytest.raises((TypeError, ValueError)):\n _plot_polygon_collection(ax, self.polygons, 
color=\"not color\")\n # check DeprecationWarning\n with pytest.warns(DeprecationWarning):\n plot_polygon_collection(ax, self.polygons)\n\n def test_polygons_values(self):\n from geopandas.plotting import _plot_polygon_collection\n\n fig, ax = plt.subplots()\n\n # default colormap, edge is still black by default\n coll = _plot_polygon_collection(ax, self.polygons, self.values)\n fig.canvas.draw_idle()\n cmap = plt.get_cmap()\n exp_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, coll.get_facecolor(), exp_colors)\n # edgecolor depends on matplotlib version\n # _check_colors(self.N, coll.get_edgecolor(), ['k'] * self.N)\n ax.cla()\n\n # specify colormap\n coll = _plot_polygon_collection(ax, self.polygons, self.values, cmap=\"RdBu\")\n fig.canvas.draw_idle()\n cmap = plt.get_cmap(\"RdBu\")\n exp_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, coll.get_facecolor(), exp_colors)\n ax.cla()\n\n # specify vmin/vmax\n coll = _plot_polygon_collection(ax, self.polygons, self.values, vmin=3, vmax=5)\n fig.canvas.draw_idle()\n cmap = plt.get_cmap()\n exp_colors = cmap([0])\n _check_colors(self.N, coll.get_facecolor(), exp_colors)\n ax.cla()\n\n # override edgecolor\n coll = _plot_polygon_collection(ax, self.polygons, self.values, edgecolor=\"g\")\n fig.canvas.draw_idle()\n cmap = plt.get_cmap()\n exp_colors = cmap(np.arange(self.N) / (self.N - 1))\n _check_colors(self.N, coll.get_facecolor(), exp_colors)\n _check_colors(self.N, coll.get_edgecolor(), [\"g\"] * self.N)\n ax.cla()\n\n\ndef test_column_values():\n \"\"\"\n Check that the dataframe plot method returns same values with an\n input string (column in df), pd.Series, or np.array\n \"\"\"\n # Build test data\n t1 = Polygon([(0, 0), (1, 0), (1, 1)])\n t2 = Polygon([(1, 0), (2, 0), (2, 1)])\n polys = GeoSeries([t1, t2], index=list(\"AB\"))\n df = GeoDataFrame({\"geometry\": polys, \"values\": [0, 1]})\n\n # Test with continuous values\n ax = df.plot(column=\"values\")\n colors = ax.collections[0].get_facecolors()\n ax = df.plot(column=df[\"values\"])\n colors_series = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(colors, colors_series)\n ax = df.plot(column=df[\"values\"].values)\n colors_array = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(colors, colors_array)\n\n # Test with categorical values\n ax = df.plot(column=\"values\", categorical=True)\n colors = ax.collections[0].get_facecolors()\n ax = df.plot(column=df[\"values\"], categorical=True)\n colors_series = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(colors, colors_series)\n ax = df.plot(column=df[\"values\"].values, categorical=True)\n colors_array = ax.collections[0].get_facecolors()\n np.testing.assert_array_equal(colors, colors_array)\n\n # Check raised error: does the number of df rows match the column length?\n with pytest.raises(ValueError, match=\"different number of rows\"):\n ax = df.plot(column=np.array([1, 2, 3]))\n\n\ndef _check_colors(N, actual_colors, expected_colors, alpha=None):\n \"\"\"\n Asserts that the members of `actual_colors` match the `expected_colors`\n (in order)\n\n Parameters\n ----------\n N : int\n The number of geometries believed to be in the collection.\n matplotlib.collections is implemented such that the number of geoms in\n a collection doesn't have to match the number of color assignments in\n the collection: the colors will cycle to meet the needs of the geoms.\n `N` helps us resolve this.\n actual_colors : sequence of RGBA tuples\n The colors of the collection's patches, e.g. as read from\n `collection.get_facecolors()`\n expected_colors : sequence of RGBA tuples\n alpha : float (optional)\n If set, this alpha transparency will be applied to the\n `expected_colors`. (Any transparency on the collection is assumed\n to be set in its own facecolor RGBA tuples.)\n \"\"\"\n import matplotlib.colors as colors\n\n conv = colors.colorConverter\n\n # Convert 2D numpy array to a list of RGBA tuples.\n actual_colors = map(tuple, actual_colors)\n all_actual_colors = list(itertools.islice(itertools.cycle(actual_colors), N))\n\n for actual, expected in zip(all_actual_colors, expected_colors):\n assert actual == conv.to_rgba(expected, alpha=alpha), \"{} != {}\".format(\n actual, conv.to_rgba(expected, alpha=alpha)\n )\n\n\ndef _style_to_linestring_onoffseq(linestyle, linewidth):\n \"\"\"Converts a linestyle string representation, namely one of:\n ['dashed', 'dotted', 'dashdot', 'solid'],\n documented in `Collections.set_linestyle`,\n to the form `onoffseq`.\n \"\"\"\n offset, dashes = matplotlib.lines._get_dash_pattern(linestyle)\n return matplotlib.lines._scale_dashes(offset, dashes, linewidth)\n\n\ndef _style_to_vertices(markerstyle):\n \"\"\" Converts a markerstyle string to a path. \"\"\"\n # TODO: Vertices values are twice the actual path; unclear why.\n path = matplotlib.markers.MarkerStyle(markerstyle).get_path()\n return path.vertices / 2\n" ]
[ [ "numpy.array", "numpy.not_equal", "matplotlib.pyplot.get_cmap", "numpy.testing.assert_array_equal", "matplotlib.pyplot.close", "matplotlib.pyplot.subplots", "numpy.mean", "pandas.Categorical", "numpy.arange", "numpy.cos", "pandas.Series", "matplotlib.pyplot.axes" ] ]
jspenmar/DeFeat-Net
[ "c3dc5e143c7f484b86cf1703a290c0f07256e3e7" ]
[ "main.py" ]
[ "from pathlib import Path\nimport sys\n\nfrom PIL import Image\nimport matplotlib.pyplot as plt\nimport torch\nimport numpy as np\n\nroot_path = Path(__file__).resolve().parent\nif str(root_path) not in sys.path:\n sys.path.insert(0, str(root_path))\n\nfrom networks.defeat_net import DeFeatNet\nfrom utils import ops\n\n\nif __name__ == '__main__':\n device = ops.get_device()\n ckpt_file = root_path / 'ckpts' / 'ckpt_seasons.pt'\n\n model = DeFeatNet.from_ckpt(ckpt_file, key=lambda x: x['model']).to(device)\n model = model.eval()\n\n imfiles = ['image1.png', 'image2.png']\n\n def load_image(file):\n image = Image.open(root_path / 'images' / file).convert('RGB')\n image = image.resize([480, 352])\n image = ops.img2torch(np.array(image), batched=True).to(device)\n return image\n\n images = torch.cat([load_image(file) for file in imfiles])\n\n with torch.no_grad():\n disp = model.depth_net(images)[('disp', 0)]\n dense_features = model.feat_net(images)\n\n disp_np = disp.squeeze(1).cpu().numpy()\n\n _, (axs, axs2) = plt.subplots(2, len(imfiles))\n plt.tight_layout()\n\n for ax, img in zip(axs, ops.torch2np(images)):\n ax.set_xticks([]), ax.set_yticks([])\n ax.imshow(img)\n\n for ax, d in zip(axs2, disp_np):\n ax.set_xticks([]), ax.set_yticks([])\n ax.imshow(d, cmap='magma', vmax=np.percentile(d, 95))\n\n plt.show()\n" ]
[ [ "numpy.array", "numpy.percentile", "torch.no_grad", "matplotlib.pyplot.tight_layout", "matplotlib.pyplot.show" ] ]
csarron/lxmert
[ "b58933c61d2715391831a0a4dc4f5dbba7e433ba" ]
[ "src/lxrt/modeling.py" ]
[ "# coding=utf-8\n# Copyright 2019 project LXRT.\n# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.\n# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"PyTorch LXRT model.\"\"\"\n\nimport copy\nimport json\nimport logging\nimport math\nimport os\nimport shutil\nimport tarfile\nimport tempfile\nimport sys\nfrom io import open\n\nimport torch\nfrom torch import nn\nfrom torch.nn import CrossEntropyLoss, SmoothL1Loss\n\nfrom param import timed\nfrom .file_utils import cached_path\n\nlogger = logging.getLogger(__name__)\n\nPRETRAINED_MODEL_ARCHIVE_MAP = {\n 'bert-base-uncased': \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased.tar.gz\",\n 'bert-large-uncased': \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased.tar.gz\",\n 'bert-base-cased': \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased.tar.gz\",\n 'bert-large-cased': \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased.tar.gz\",\n 'bert-base-multilingual-uncased': \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased.tar.gz\",\n 'bert-base-multilingual-cased': \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased.tar.gz\",\n 'bert-base-chinese': \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese.tar.gz\",\n}\nCONFIG_NAME = 'bert_config.json'\nWEIGHTS_NAME = 'pytorch_model.bin'\nTF_WEIGHTS_NAME = 'model.ckpt'\n\ndef load_tf_weights_in_bert(model, tf_checkpoint_path):\n \"\"\" Load tf checkpoints in a pytorch model\n \"\"\"\n try:\n import re\n import numpy as np\n import tensorflow as tf\n except ImportError:\n print(\"Loading a TensorFlow model in PyTorch requires TensorFlow to be installed. 
Please see \"\n \"https://www.tensorflow.org/install/ for installation instructions.\")\n raise\n tf_path = os.path.abspath(tf_checkpoint_path)\n print(\"Converting TensorFlow checkpoint from {}\".format(tf_path))\n # Load weights from TF model\n init_vars = tf.train.list_variables(tf_path)\n names = []\n arrays = []\n for name, shape in init_vars:\n print(\"Loading TF weight {} with shape {}\".format(name, shape))\n array = tf.train.load_variable(tf_path, name)\n names.append(name)\n arrays.append(array)\n\n for name, array in zip(names, arrays):\n name = name.split('/')\n # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculate m and v,\n # which are not required for using the pretrained model\n if any(n in [\"adam_v\", \"adam_m\"] for n in name):\n print(\"Skipping {}\".format(\"/\".join(name)))\n continue\n pointer = model\n for m_name in name:\n if re.fullmatch(r'[A-Za-z]+_\\d+', m_name):\n l = re.split(r'_(\\d+)', m_name)\n else:\n l = [m_name]\n if l[0] == 'kernel' or l[0] == 'gamma':\n pointer = getattr(pointer, 'weight')\n elif l[0] == 'output_bias' or l[0] == 'beta':\n pointer = getattr(pointer, 'bias')\n elif l[0] == 'output_weights':\n pointer = getattr(pointer, 'weight')\n else:\n pointer = getattr(pointer, l[0])\n if len(l) >= 2:\n num = int(l[1])\n pointer = pointer[num]\n if m_name[-11:] == '_embeddings':\n pointer = getattr(pointer, 'weight')\n elif m_name == 'kernel':\n array = np.transpose(array)\n try:\n assert pointer.shape == array.shape\n except AssertionError as e:\n e.args += (pointer.shape, array.shape)\n raise\n print(\"Initialize PyTorch weight {}\".format(name))\n pointer.data = torch.from_numpy(array)\n return model\n\n\ndef gelu(x):\n \"\"\"Implementation of the gelu activation function.\n For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):\n 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))\n Also see https://arxiv.org/abs/1606.08415\n \"\"\"\n return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))\n\n\nclass GeLU(nn.Module):\n \"\"\"Implementation of the gelu activation function.\n For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):\n 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))\n Also see https://arxiv.org/abs/1606.08415\n \"\"\"\n def __init__(self):\n super().__init__()\n\n def forward(self, x):\n return gelu(x)\n\n\ndef swish(x):\n return x * torch.sigmoid(x)\n\n\nACT2FN = {\"gelu\": gelu, \"relu\": torch.nn.functional.relu, \"swish\": swish}\n\n\nclass VisualConfig(object):\n VISUAL_LOSSES = ['obj', 'attr', 'feat']\n def __init__(self,\n l_layers=12,\n x_layers=5,\n r_layers=0):\n self.l_layers = l_layers\n self.x_layers = x_layers\n self.r_layers = r_layers\n # 128 for s, 192 for m,\n self.visual_base_dim = int(os.environ.get('VBD', 128))\n self.visual_feat_dim = 2048\n self.visual_scale_dims = [self.visual_base_dim*i for i in [1, 2, 4]]\n self.visual_pos_dim = 4\n\n self.obj_id_num = 1600\n self.attr_id_num = 400\n\n self.visual_losses = self.VISUAL_LOSSES\n self.visual_loss_config = {\n 'obj': (self.obj_id_num, 'ce', (-1,), 1/0.15),\n 'attr': (self.attr_id_num, 'ce', (-1,), 1/0.15),\n 'feat': (2048, 'l2', (-1, 2048), 1/0.15),\n }\n\n def set_visual_dims(self, feat_dim, pos_dim):\n self.visual_feat_dim = feat_dim\n self.visual_pos_dim = pos_dim\n\n\nVISUAL_CONFIG = VisualConfig()\n\n\nclass BertConfig(object):\n \"\"\"Configuration class to store the configuration of 
a `BertModel`.\n \"\"\"\n def __init__(self,\n vocab_size_or_config_json_file,\n hidden_size=768,\n num_hidden_layers=12,\n num_attention_heads=12,\n intermediate_size=3072,\n hidden_act=\"gelu\",\n hidden_dropout_prob=0.1,\n attention_probs_dropout_prob=0.1,\n max_position_embeddings=512,\n type_vocab_size=2,\n initializer_range=0.02):\n \"\"\"Constructs BertConfig.\n\n Args:\n vocab_size_or_config_json_file: Vocabulary size of `inputs_ids` in `BertModel`.\n hidden_size: Size of the encoder layers and the pooler layer.\n num_hidden_layers: Number of hidden layers in the Transformer encoder.\n num_attention_heads: Number of attention heads for each attention layer in\n the Transformer encoder.\n intermediate_size: The size of the \"intermediate\" (i.e., feed-forward)\n layer in the Transformer encoder.\n hidden_act: The non-linear activation function (function or string) in the\n encoder and pooler. If string, \"gelu\", \"relu\" and \"swish\" are supported.\n hidden_dropout_prob: The dropout probability for all fully connected\n layers in the embeddings, encoder, and pooler.\n attention_probs_dropout_prob: The dropout ratio for the attention\n probabilities.\n max_position_embeddings: The maximum sequence length that this model might\n ever be used with. Typically set this to something large just in case\n (e.g., 512 or 1024 or 2048).\n type_vocab_size: The vocabulary size of the `token_type_ids` passed into\n `BertModel`.\n initializer_range: The standard deviation of the truncated_normal_initializer for\n initializing all weight matrices.\n \"\"\"\n if isinstance(vocab_size_or_config_json_file, str) or (sys.version_info[0] == 2\n and isinstance(vocab_size_or_config_json_file, unicode)):\n with open(vocab_size_or_config_json_file, \"r\", encoding='utf-8') as reader:\n json_config = json.loads(reader.read())\n for key, value in json_config.items():\n self.__dict__[key] = value\n elif isinstance(vocab_size_or_config_json_file, int):\n self.vocab_size = vocab_size_or_config_json_file\n self.hidden_size = hidden_size\n self.num_hidden_layers = num_hidden_layers\n self.num_attention_heads = num_attention_heads\n self.hidden_act = hidden_act\n self.intermediate_size = intermediate_size\n self.hidden_dropout_prob = hidden_dropout_prob\n self.attention_probs_dropout_prob = attention_probs_dropout_prob\n self.max_position_embeddings = max_position_embeddings\n self.type_vocab_size = type_vocab_size\n self.initializer_range = initializer_range\n else:\n raise ValueError(\"First argument must be either a vocabulary size (int) \"\n \"or the path to a pretrained model config file (str)\")\n\n @classmethod\n def from_dict(cls, json_object):\n \"\"\"Constructs a `BertConfig` from a Python dictionary of parameters.\"\"\"\n config = BertConfig(vocab_size_or_config_json_file=-1)\n for key, value in json_object.items():\n config.__dict__[key] = value\n return config\n\n @classmethod\n def from_json_file(cls, json_file):\n \"\"\"Constructs a `BertConfig` from a json file of parameters.\"\"\"\n with open(json_file, \"r\", encoding='utf-8') as reader:\n text = reader.read()\n return cls.from_dict(json.loads(text))\n\n def __repr__(self):\n return str(self.to_json_string())\n\n def to_dict(self):\n \"\"\"Serializes this instance to a Python dictionary.\"\"\"\n output = copy.deepcopy(self.__dict__)\n return output\n\n def to_json_string(self):\n \"\"\"Serializes this instance to a JSON string.\"\"\"\n return json.dumps(self.to_dict(), indent=2, sort_keys=True) + \"\\n\"\n\n\nBertLayerNorm = torch.nn.LayerNorm\n\n\nclass 
BertEmbeddings(nn.Module):\n \"\"\"Construct the embeddings from word, position and token_type embeddings.\n \"\"\"\n def __init__(self, config):\n super(BertEmbeddings, self).__init__()\n self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=0)\n self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size, padding_idx=0)\n self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size, padding_idx=0)\n\n # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load\n # any TensorFlow checkpoint file\n self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12)\n self.dropout = nn.Dropout(config.hidden_dropout_prob)\n\n def forward(self, input_ids, token_type_ids=None):\n seq_length = input_ids.size(1)\n position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device)\n position_ids = position_ids.unsqueeze(0).expand_as(input_ids)\n if token_type_ids is None:\n token_type_ids = torch.zeros_like(input_ids)\n\n words_embeddings = self.word_embeddings(input_ids)\n position_embeddings = self.position_embeddings(position_ids)\n token_type_embeddings = self.token_type_embeddings(token_type_ids)\n\n embeddings = words_embeddings + position_embeddings + token_type_embeddings\n embeddings = self.LayerNorm(embeddings)\n embeddings = self.dropout(embeddings)\n return embeddings\n\n\nclass BertAttention(nn.Module):\n def __init__(self, config, ctx_dim=None):\n super().__init__()\n if config.hidden_size % config.num_attention_heads != 0:\n raise ValueError(\n \"The hidden size (%d) is not a multiple of the number of attention \"\n \"heads (%d)\" % (config.hidden_size, config.num_attention_heads))\n self.num_attention_heads = config.num_attention_heads\n self.attention_head_size = int(config.hidden_size / config.num_attention_heads)\n self.all_head_size = self.num_attention_heads * self.attention_head_size\n\n # visual_dim = 2048\n if ctx_dim is None:\n ctx_dim =config.hidden_size\n self.query = nn.Linear(config.hidden_size, self.all_head_size)\n self.key = nn.Linear(ctx_dim, self.all_head_size)\n self.value = nn.Linear(ctx_dim, self.all_head_size)\n\n self.dropout = nn.Dropout(config.attention_probs_dropout_prob)\n\n def transpose_for_scores(self, x):\n new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)\n x = x.view(*new_x_shape)\n return x.permute(0, 2, 1, 3)\n\n def forward(self, hidden_states, context, attention_mask=None):\n mixed_query_layer = self.query(hidden_states)\n mixed_key_layer = self.key(context)\n mixed_value_layer = self.value(context)\n\n query_layer = self.transpose_for_scores(mixed_query_layer)\n key_layer = self.transpose_for_scores(mixed_key_layer)\n value_layer = self.transpose_for_scores(mixed_value_layer)\n\n # Take the dot product between \"query\" and \"key\" to get the raw attention scores.\n attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))\n attention_scores = attention_scores / math.sqrt(self.attention_head_size)\n # Apply the attention mask is (precomputed for all layers in BertModel forward() function)\n if attention_mask is not None:\n attention_scores = attention_scores + attention_mask\n\n # Normalize the attention scores to probabilities.\n attention_probs = nn.Softmax(dim=-1)(attention_scores)\n\n # This is actually dropping out entire tokens to attend to, which might\n # seem a bit unusual, but is taken from the original Transformer paper.\n attention_probs = 
self.dropout(attention_probs)\n\n context_layer = torch.matmul(attention_probs, value_layer)\n context_layer = context_layer.permute(0, 2, 1, 3).contiguous()\n new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)\n context_layer = context_layer.view(*new_context_layer_shape)\n return context_layer\n\n\nclass BertAttOutput(nn.Module):\n def __init__(self, config):\n super(BertAttOutput, self).__init__()\n self.dense = nn.Linear(config.hidden_size, config.hidden_size)\n self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12)\n self.dropout = nn.Dropout(config.hidden_dropout_prob)\n\n def forward(self, hidden_states, input_tensor):\n hidden_states = self.dense(hidden_states)\n hidden_states = self.dropout(hidden_states)\n hidden_states = self.LayerNorm(hidden_states + input_tensor)\n return hidden_states\n\n\nclass BertCrossattLayer(nn.Module):\n def __init__(self, config):\n super().__init__()\n self.att = BertAttention(config)\n self.output = BertAttOutput(config)\n\n def forward(self, input_tensor, ctx_tensor, ctx_att_mask=None):\n output = self.att(input_tensor, ctx_tensor, ctx_att_mask)\n attention_output = self.output(output, input_tensor)\n return attention_output\n\n\nclass BertSelfattLayer(nn.Module):\n def __init__(self, config):\n super(BertSelfattLayer, self).__init__()\n self.self = BertAttention(config)\n self.output = BertAttOutput(config)\n\n def forward(self, input_tensor, attention_mask):\n # Self attention attends to itself, thus keys and querys are the same (input_tensor).\n self_output = self.self(input_tensor, input_tensor, attention_mask)\n attention_output = self.output(self_output, input_tensor)\n return attention_output\n\n\nclass BertIntermediate(nn.Module):\n def __init__(self, config):\n super(BertIntermediate, self).__init__()\n self.dense = nn.Linear(config.hidden_size, config.intermediate_size)\n if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)):\n self.intermediate_act_fn = ACT2FN[config.hidden_act]\n else:\n self.intermediate_act_fn = config.hidden_act\n\n def forward(self, hidden_states):\n hidden_states = self.dense(hidden_states)\n hidden_states = self.intermediate_act_fn(hidden_states)\n return hidden_states\n\n\nclass BertOutput(nn.Module):\n def __init__(self, config):\n super(BertOutput, self).__init__()\n self.dense = nn.Linear(config.intermediate_size, config.hidden_size)\n self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12)\n self.dropout = nn.Dropout(config.hidden_dropout_prob)\n\n def forward(self, hidden_states, input_tensor):\n hidden_states = self.dense(hidden_states)\n hidden_states = self.dropout(hidden_states)\n hidden_states = self.LayerNorm(hidden_states + input_tensor)\n return hidden_states\n\n\nclass BertLayer(nn.Module):\n def __init__(self, config):\n super(BertLayer, self).__init__()\n self.attention = BertSelfattLayer(config)\n self.intermediate = BertIntermediate(config)\n self.output = BertOutput(config)\n\n def forward(self, hidden_states, attention_mask):\n attention_output = self.attention(hidden_states, attention_mask)\n intermediate_output = self.intermediate(attention_output)\n layer_output = self.output(intermediate_output, attention_output)\n return layer_output\n\n\n\"\"\"\n---------------------------------------------------------------------------------------\n Above modules are copied from BERT (pytorch-transformer) with 
modifications.\n---------------------------------------------------------------------------------------\n\"\"\"\n\n\nclass LXRTXLayer(nn.Module):\n def __init__(self, config):\n super().__init__()\n # The cross-attention Layer\n self.visual_attention = BertCrossattLayer(config)\n\n # Self-attention Layers\n self.lang_self_att = BertSelfattLayer(config)\n self.visn_self_att = BertSelfattLayer(config)\n\n # Intermediate and Output Layers (FFNs)\n self.lang_inter = BertIntermediate(config)\n self.lang_output = BertOutput(config)\n self.visn_inter = BertIntermediate(config)\n self.visn_output = BertOutput(config)\n\n def cross_att(self, lang_input, lang_attention_mask, visn_input, visn_attention_mask):\n # Cross Attention\n lang_att_output = self.visual_attention(lang_input, visn_input, ctx_att_mask=visn_attention_mask)\n visn_att_output = self.visual_attention(visn_input, lang_input, ctx_att_mask=lang_attention_mask)\n return lang_att_output, visn_att_output\n\n def self_att(self, lang_input, lang_attention_mask, visn_input, visn_attention_mask):\n # Self Attention\n lang_att_output = self.lang_self_att(lang_input, lang_attention_mask)\n visn_att_output = self.visn_self_att(visn_input, visn_attention_mask)\n return lang_att_output, visn_att_output\n\n def output_fc(self, lang_input, visn_input):\n # FC layers\n lang_inter_output = self.lang_inter(lang_input)\n visn_inter_output = self.visn_inter(visn_input)\n\n # Layer output\n lang_output = self.lang_output(lang_inter_output, lang_input)\n visn_output = self.visn_output(visn_inter_output, visn_input)\n return lang_output, visn_output\n\n def forward(self, lang_feats, lang_attention_mask,\n visn_feats, visn_attention_mask):\n lang_att_output = lang_feats\n visn_att_output = visn_feats\n\n lang_att_output, visn_att_output = self.cross_att(lang_att_output, lang_attention_mask,\n visn_att_output, visn_attention_mask)\n lang_att_output, visn_att_output = self.self_att(lang_att_output, lang_attention_mask,\n visn_att_output, visn_attention_mask)\n lang_output, visn_output = self.output_fc(lang_att_output, visn_att_output)\n\n return lang_output, visn_output\n\n\nclass VisualFeatEncoder(nn.Module):\n def __init__(self, config):\n super().__init__()\n feat_dim = VISUAL_CONFIG.visual_feat_dim\n pos_dim = VISUAL_CONFIG.visual_pos_dim\n scale_dims = VISUAL_CONFIG.visual_scale_dims\n self.feat_fc = nn.ModuleList(\n [nn.Linear(dim, config.hidden_size) for dim in scale_dims]\n )\n self.feat_norm = nn.ModuleList(\n [BertLayerNorm(config.hidden_size, eps=1e-12) for _ in scale_dims]\n )\n self.box_fc = nn.ModuleList(\n [nn.Linear(pos_dim, config.hidden_size) for _ in scale_dims]\n )\n self.box_norm = nn.ModuleList(\n [BertLayerNorm(config.hidden_size, eps=1e-12) for _ in scale_dims]\n )\n # Object feature encoding\n # self.visn_fc = nn.Linear(feat_dim, config.hidden_size)\n # self.visn_layer_norm = BertLayerNorm(config.hidden_size, eps=1e-12)\n #\n # # Box position encoding\n # self.box_fc = nn.Linear(pos_dim, config.hidden_size)\n # self.box_layer_norm = BertLayerNorm(config.hidden_size, eps=1e-12)\n\n self.dropout = nn.Dropout(config.hidden_dropout_prob)\n\n def forward(self, visn_input):\n feats, boxes = visn_input\n # modified to support multi scale feats and boxes\n # e.g. 
feats = [8x128, 8x256, 8x512], boxes = [8x4, 8x4, 8x4]\n outputs = []\n for i, (feat, box) in enumerate(zip(feats, boxes)):\n if feat is None:\n continue\n x = self.feat_fc[i](feat)\n x = self.feat_norm[i](x)\n y = self.box_fc[i](box)\n y = self.box_norm[i](y)\n output = (x + y) / 2\n outputs.append(output)\n outputs = torch.cat(outputs, 1)\n outputs = self.dropout(outputs)\n return outputs\n # x = self.visn_fc(feats)\n # x = self.visn_layer_norm(x)\n # y = self.box_fc(boxes)\n # y = self.box_layer_norm(y)\n # output = (x + y) / 2\n #\n # output = self.dropout(output)\n # return output\n\n\nclass LXRTEncoder(nn.Module):\n def __init__(self, config):\n super().__init__()\n\n # Obj-level image embedding layer\n self.visn_fc = VisualFeatEncoder(config)\n\n # Number of layers\n self.num_l_layers = VISUAL_CONFIG.l_layers\n self.num_x_layers = VISUAL_CONFIG.x_layers\n self.num_r_layers = VISUAL_CONFIG.r_layers\n print(\"LXRT encoder with %d l_layers, %d x_layers, and %d r_layers.\" %\n (self.num_l_layers, self.num_x_layers, self.num_r_layers))\n\n # Layers\n # Using self.layer instead of self.l_layer to support loading BERT weights.\n self.layer = nn.ModuleList(\n [BertLayer(config) for _ in range(self.num_l_layers)]\n )\n self.x_layers = nn.ModuleList(\n [LXRTXLayer(config) for _ in range(self.num_x_layers)]\n )\n self.r_layers = nn.ModuleList(\n [BertLayer(config) for _ in range(self.num_r_layers)]\n )\n\n @timed\n def image_encoder(self, visn_feats, visn_attention_mask):\n visn_feats = self.visn_fc(visn_feats)\n\n # Run relational layers\n for layer_module in self.r_layers:\n visn_feats = layer_module(visn_feats, visn_attention_mask)\n return visn_feats\n\n @timed\n def ques_encoder(self, lang_feats, lang_attention_mask):\n # Run language layers\n for layer_module in self.layer:\n lang_feats = layer_module(lang_feats, lang_attention_mask)\n return lang_feats\n\n @timed\n def cross_encoder(self, lang_feats, lang_attention_mask,\n visn_feats, visn_attention_mask):\n # Run cross-modality layers\n for layer_module in self.x_layers:\n lang_feats, visn_feats = layer_module(\n lang_feats, lang_attention_mask, visn_feats,\n visn_attention_mask)\n return lang_feats, visn_feats\n\n def forward(self, lang_feats, lang_attention_mask,\n visn_feats, visn_attention_mask=None):\n # Run visual embedding layer\n # Note: Word embedding layer was executed outside this module.\n # Keep this design to allow loading BERT weights.\n visn_feats = self.image_encoder(visn_feats, visn_attention_mask)\n lang_feats = self.ques_encoder(lang_feats, lang_attention_mask)\n lang_feats, visn_feats = self.cross_encoder(\n lang_feats, lang_attention_mask, visn_feats, visn_attention_mask)\n return lang_feats, visn_feats\n\n\nclass BertPooler(nn.Module):\n def __init__(self, config):\n super(BertPooler, self).__init__()\n self.dense = nn.Linear(config.hidden_size, config.hidden_size)\n self.activation = nn.Tanh()\n\n def forward(self, hidden_states):\n # We \"pool\" the model by simply taking the hidden state corresponding\n # to the first token.\n first_token_tensor = hidden_states[:, 0]\n pooled_output = self.dense(first_token_tensor)\n pooled_output = self.activation(pooled_output)\n return pooled_output\n\n\nclass BertPredictionHeadTransform(nn.Module):\n def __init__(self, config):\n super(BertPredictionHeadTransform, self).__init__()\n self.dense = nn.Linear(config.hidden_size, config.hidden_size)\n if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)):\n 
self.transform_act_fn = ACT2FN[config.hidden_act]\n else:\n self.transform_act_fn = config.hidden_act\n self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12)\n\n def forward(self, hidden_states):\n hidden_states = self.dense(hidden_states)\n hidden_states = self.transform_act_fn(hidden_states)\n hidden_states = self.LayerNorm(hidden_states)\n return hidden_states\n\n\nclass BertLMPredictionHead(nn.Module):\n def __init__(self, config, bert_model_embedding_weights):\n super(BertLMPredictionHead, self).__init__()\n self.transform = BertPredictionHeadTransform(config)\n\n # The output weights are the same as the input embeddings, but there is\n # an output-only bias for each token.\n self.decoder = nn.Linear(bert_model_embedding_weights.size(1),\n bert_model_embedding_weights.size(0),\n bias=False)\n self.decoder.weight = bert_model_embedding_weights\n self.bias = nn.Parameter(torch.zeros(bert_model_embedding_weights.size(0)))\n\n def forward(self, hidden_states):\n hidden_states = self.transform(hidden_states)\n hidden_states = self.decoder(hidden_states) + self.bias\n return hidden_states\n\n\nclass BertVisualAnswerHead(nn.Module):\n def __init__(self, config, num_answers):\n super().__init__()\n hid_dim = config.hidden_size\n self.logit_fc = nn.Sequential(\n nn.Linear(hid_dim, hid_dim * 2),\n GeLU(),\n BertLayerNorm(hid_dim * 2, eps=1e-12),\n nn.Linear(hid_dim * 2, num_answers)\n )\n\n def forward(self, hidden_states):\n return self.logit_fc(hidden_states)\n\n\nclass BertVisualObjHead(nn.Module):\n def __init__(self, config, visual_losses):\n super().__init__()\n self.transform = BertPredictionHeadTransform(config)\n\n # Decide the use of visual losses\n visual_losses = visual_losses.split(\",\")\n for loss in visual_losses:\n assert loss in VISUAL_CONFIG.VISUAL_LOSSES\n self.visual_losses = visual_losses\n\n # The output weights are the same as the input embeddings, but there is\n # an output-only bias for each token.\n self.decoder_dict = nn.ModuleDict({\n key: nn.Linear(config.hidden_size, VISUAL_CONFIG.visual_loss_config[key][0])\n for key in self.visual_losses\n })\n\n def forward(self, hidden_states):\n hidden_states = self.transform(hidden_states)\n output = {}\n for key in self.visual_losses:\n output[key] = self.decoder_dict[key](hidden_states)\n return output\n\n\nclass BertPreTrainingHeads(nn.Module):\n def __init__(self, config, bert_model_embedding_weights):\n super(BertPreTrainingHeads, self).__init__()\n self.predictions = BertLMPredictionHead(config, bert_model_embedding_weights)\n self.seq_relationship = nn.Linear(config.hidden_size, 2)\n\n def forward(self, sequence_output, pooled_output):\n prediction_scores = self.predictions(sequence_output)\n seq_relationship_score = self.seq_relationship(pooled_output)\n return prediction_scores, seq_relationship_score\n\n\nclass BertPreTrainedModel(nn.Module):\n \"\"\" An abstract class to handle weights initialization and\n a simple interface for dowloading and loading pretrained models.\n \"\"\"\n def __init__(self, config, *inputs, **kwargs):\n super(BertPreTrainedModel, self).__init__()\n if not isinstance(config, BertConfig):\n raise ValueError(\n \"Parameter config in `{}(config)` should be an instance of class `BertConfig`. 
\"\n \"To create a model from a Google pretrained model use \"\n \"`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`\".format(\n self.__class__.__name__, self.__class__.__name__\n ))\n self.config = config\n\n def init_bert_weights(self, module):\n \"\"\" Initialize the weights.\n \"\"\"\n if isinstance(module, (nn.Linear, nn.Embedding)):\n # Slightly different from the TF version which uses truncated_normal for initialization\n # cf https://github.com/pytorch/pytorch/pull/5617\n module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)\n elif isinstance(module, BertLayerNorm):\n module.bias.data.zero_()\n module.weight.data.fill_(1.0)\n if isinstance(module, nn.Linear) and module.bias is not None:\n module.bias.data.zero_()\n\n @classmethod\n def from_pretrained(cls, pretrained_model_name_or_path, state_dict=None, cache_dir=None,\n from_tf=False, *inputs, **kwargs):\n \"\"\"\n Instantiate a BertPreTrainedModel from a pre-trained model file or a pytorch state dict.\n Download and cache the pre-trained model file if needed.\n\n Params:\n pretrained_model_name_or_path: either:\n - a str with the name of a pre-trained model to load selected in the list of:\n . `bert-base-uncased`\n . `bert-large-uncased`\n . `bert-base-cased`\n . `bert-large-cased`\n . `bert-base-multilingual-uncased`\n . `bert-base-multilingual-cased`\n . `bert-base-chinese`\n - a path or url to a pretrained model archive containing:\n . `bert_config.json` a configuration file for the model\n . `pytorch_model.bin` a PyTorch dump of a BertForPreTraining instance\n - a path or url to a pretrained model archive containing:\n . `bert_config.json` a configuration file for the model\n . `model.chkpt` a TensorFlow checkpoint\n from_tf: should we load the weights from a locally saved TensorFlow checkpoint\n cache_dir: an optional path to a folder in which the pre-trained models will be cached.\n state_dict: an optional state dictionnary (collections.OrderedDict object) to use instead of Google pre-trained models\n *inputs, **kwargs: additional input for the specific Bert class\n (ex: num_labels for BertForSequenceClassification)\n \"\"\"\n if pretrained_model_name_or_path in PRETRAINED_MODEL_ARCHIVE_MAP:\n archive_file = PRETRAINED_MODEL_ARCHIVE_MAP[pretrained_model_name_or_path]\n else:\n archive_file = pretrained_model_name_or_path\n # redirect to the cache, if necessary\n try:\n resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir)\n except EnvironmentError:\n if pretrained_model_name_or_path == 'bert-base-uncased':\n try:\n print(\"The BERT-weight-downloading query to AWS was time-out;\" \n \"trying to download from UNC servers\")\n archive_file = \"https://nlp.cs.unc.edu/data/bert/bert-base-uncased.tar.gz\"\n resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir)\n except EnvironmentError:\n print(\"The weight-downloading still crashed with link: %s, \"\n \"please check your network connection\" % archive_file)\n return None\n else:\n logger.error(\n \"Model name '{}' was not found in model name list ({}). 
\"\n \"We assumed '{}' was a path or url but couldn't find any file \"\n \"associated to this path or url.\".format(\n pretrained_model_name_or_path,\n ', '.join(PRETRAINED_MODEL_ARCHIVE_MAP.keys()),\n archive_file))\n if resolved_archive_file == archive_file:\n logger.info(\"loading archive file {}\".format(archive_file))\n else:\n logger.info(\"loading archive file {} from cache at {}\".format(\n archive_file, resolved_archive_file))\n tempdir = None\n if os.path.isdir(resolved_archive_file) or from_tf:\n serialization_dir = resolved_archive_file\n else:\n # Extract archive to temp dir\n tempdir = tempfile.mkdtemp()\n logger.info(\"extracting archive file {} to temp dir {}\".format(\n resolved_archive_file, tempdir))\n with tarfile.open(resolved_archive_file, 'r:gz') as archive:\n archive.extractall(tempdir)\n serialization_dir = tempdir\n # Load config\n config_file = os.path.join(serialization_dir, CONFIG_NAME)\n config = BertConfig.from_json_file(config_file)\n logger.info(\"Model config {}\".format(config))\n # Instantiate model.\n model = cls(config, *inputs, **kwargs)\n if state_dict is None and not from_tf:\n weights_path = os.path.join(serialization_dir, WEIGHTS_NAME)\n state_dict = torch.load(weights_path, map_location='cpu' if not torch.cuda.is_available() else None)\n if tempdir:\n # Clean up temp dir\n shutil.rmtree(tempdir)\n if from_tf:\n # Directly load from a TensorFlow checkpoint\n weights_path = os.path.join(serialization_dir, TF_WEIGHTS_NAME)\n return load_tf_weights_in_bert(model, weights_path)\n # Load from a PyTorch state_dict\n old_keys = []\n new_keys = []\n for key in state_dict.keys():\n new_key = None\n if 'gamma' in key:\n new_key = key.replace('gamma', 'weight')\n if 'beta' in key:\n new_key = key.replace('beta', 'bias')\n if new_key:\n old_keys.append(key)\n new_keys.append(new_key)\n for old_key, new_key in zip(old_keys, new_keys):\n state_dict[new_key] = state_dict.pop(old_key)\n\n missing_keys = []\n unexpected_keys = []\n error_msgs = []\n # copy state_dict so _load_from_state_dict can modify it\n metadata = getattr(state_dict, '_metadata', None)\n state_dict = state_dict.copy()\n if metadata is not None:\n state_dict._metadata = metadata\n\n def load(module, prefix=''):\n local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})\n module._load_from_state_dict(\n state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs)\n for name, child in module._modules.items():\n if child is not None:\n load(child, prefix + name + '.')\n start_prefix = ''\n if not hasattr(model, 'bert') and any(s.startswith('bert.') for s in state_dict.keys()):\n start_prefix = 'bert.'\n load(model, prefix=start_prefix)\n # if len(missing_keys) > 0:\n # logger.info(\"Weights of {} not initialized from pretrained model: {}\".format(\n # model.__class__.__name__, missing_keys))\n # if len(unexpected_keys) > 0:\n # logger.info(\"Weights from pretrained model not used in {}: {}\".format(\n # model.__class__.__name__, unexpected_keys))\n if len(error_msgs) > 0:\n raise RuntimeError('Error(s) in loading state_dict for {}:\\n\\t{}'.format(\n model.__class__.__name__, \"\\n\\t\".join(error_msgs)))\n return model\n\n\nclass LXRTModel(BertPreTrainedModel):\n \"\"\"LXRT Model.\"\"\"\n\n def __init__(self, config):\n super().__init__(config)\n self.embeddings = BertEmbeddings(config)\n self.encoder = LXRTEncoder(config)\n self.pooler = BertPooler(config)\n self.apply(self.init_bert_weights)\n\n def forward(self, input_ids, token_type_ids=None, 
attention_mask=None,\n visual_feats=None, visual_attention_mask=None):\n if attention_mask is None:\n attention_mask = torch.ones_like(input_ids)\n if token_type_ids is None:\n token_type_ids = torch.zeros_like(input_ids)\n\n # We create a 3D attention mask from a 2D tensor mask.\n # Sizes are [batch_size, 1, 1, to_seq_length]\n # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]\n # this attention mask is more simple than the triangular masking of causal attention\n # used in OpenAI GPT, we just need to prepare the broadcast dimension here.\n extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)\n\n # Since attention_mask is 1.0 for positions we want to attend and 0.0 for\n # masked positions, this operation will create a tensor which is 0.0 for\n # positions we want to attend and -10000.0 for masked positions.\n # Since we are adding it to the raw scores before the softmax, this is\n # effectively the same as removing these entirely.\n extended_attention_mask = extended_attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility\n extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0\n\n # Process the visual attention mask\n if visual_attention_mask is not None:\n extended_visual_attention_mask = visual_attention_mask.unsqueeze(1).unsqueeze(2)\n extended_visual_attention_mask = extended_visual_attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility\n extended_visual_attention_mask = (1.0 - extended_visual_attention_mask) * -10000.0\n else:\n extended_visual_attention_mask = None\n\n # Positional Word Embeddings\n embedding_output = self.embeddings(input_ids, token_type_ids)\n\n # Run LXRT backbone\n lang_feats, visn_feats = self.encoder(\n embedding_output,\n extended_attention_mask,\n visn_feats=visual_feats,\n visn_attention_mask=extended_visual_attention_mask)\n pooled_output = self.pooler(lang_feats)\n\n return (lang_feats, visn_feats), pooled_output\n\n\nclass LXRTPretraining(BertPreTrainedModel):\n def __init__(self,\n config,\n task_mask_lm=True,\n task_matched=True,\n task_obj_predict=True,\n visual_losses='',\n task_qa=True,\n num_answers=2):\n super().__init__(config)\n # Configuration\n self.config = config\n self.num_answers = num_answers\n\n # Use of pre-training tasks\n self.task_mask_lm = task_mask_lm\n self.task_obj_predict = task_obj_predict\n self.task_matched = task_matched\n self.task_qa = task_qa\n\n # LXRT backbone\n self.bert = LXRTModel(config)\n\n # Pre-training heads\n self.cls = BertPreTrainingHeads(config, self.bert.embeddings.word_embeddings.weight)\n if self.task_obj_predict:\n self.obj_predict_head = BertVisualObjHead(config, visual_losses)\n if self.task_qa:\n self.answer_head = BertVisualAnswerHead(config, self.num_answers)\n\n # Weight initialization\n self.apply(self.init_bert_weights)\n\n def forward(self, input_ids, token_type_ids=None, attention_mask=None, masked_lm_labels=None,\n visual_feats=None, pos=None, obj_labels=None, matched_label=None, ans=None):\n (lang_output, visn_output), pooled_output = self.bert(\n input_ids, token_type_ids, attention_mask,\n visual_feats=(visual_feats, pos),\n )\n\n lang_prediction_scores, cross_relationship_score = self.cls(lang_output, pooled_output)\n if self.task_qa:\n answer_score = self.answer_head(pooled_output)\n else:\n # This answer_score would not be used anywhere,\n # just to keep a constant return function signature.\n answer_score = pooled_output[0][0]\n\n total_loss = 0.\n loss_fct = 
CrossEntropyLoss(ignore_index=-1)\n losses = ()\n if masked_lm_labels is not None and self.task_mask_lm:\n masked_lm_loss = loss_fct(\n lang_prediction_scores.view(-1, self.config.vocab_size),\n masked_lm_labels.view(-1)\n )\n total_loss += masked_lm_loss\n losses += (masked_lm_loss.detach(),)\n if matched_label is not None and self.task_matched:\n matched_loss = loss_fct(\n cross_relationship_score.view(-1, 2),\n matched_label.view(-1)\n )\n total_loss += matched_loss\n losses += (matched_loss.detach(),)\n if obj_labels is not None and self.task_obj_predict:\n loss_fcts = {\n 'l2': SmoothL1Loss(reduction='none'),\n 'ce': CrossEntropyLoss(ignore_index=-1, reduction='none')\n }\n total_visn_loss = 0.\n visn_prediction_scores_dict = self.obj_predict_head(visn_output)\n for key in VISUAL_CONFIG.visual_losses:\n label, mask_conf = obj_labels[key]\n output_dim, loss_fct_name, label_shape, weight = VISUAL_CONFIG.visual_loss_config[key]\n visn_loss_fct = loss_fcts[loss_fct_name]\n visn_prediction_scores = visn_prediction_scores_dict[key]\n visn_loss = visn_loss_fct(\n visn_prediction_scores.view(-1, output_dim),\n label.view(*label_shape),\n )\n if visn_loss.dim() > 1: # Regression Losses\n visn_loss = visn_loss.mean(1)\n visn_loss = (visn_loss * mask_conf.view(-1)).mean() * weight\n total_visn_loss += visn_loss\n losses += (visn_loss.detach(),)\n total_loss += total_visn_loss\n if ans is not None and self.task_qa:\n answer_loss = loss_fct(\n answer_score.view(-1, self.num_answers),\n ans.view(-1)\n )\n # Since this Github version pre-trains with QA loss from the beginning,\n # I exclude \"*2\" here to match the effect of QA losses.\n # Previous: (loss *0) for 6 epochs, (loss *2) for 6 epochs. (Used 10 instead of 6 in EMNLP paper)\n # Now : (loss *1) for 12 epochs\n #\n # * 2 # Multiply by 2 because > half of the data will not have label\n total_loss += answer_loss\n losses += (answer_loss.detach(),)\n return total_loss, torch.stack(losses).unsqueeze(0), answer_score.detach()\n\n\nclass LXRTFeatureExtraction(BertPreTrainedModel):\n \"\"\"\n BERT model for classification.\n \"\"\"\n def __init__(self, config, mode='lxr'):\n \"\"\"\n\n :param config:\n :param mode: Number of visual layers\n \"\"\"\n super().__init__(config)\n self.bert = LXRTModel(config)\n self.mode = mode\n self.apply(self.init_bert_weights)\n\n def forward(self, input_ids, token_type_ids=None, attention_mask=None, visual_feats=None,\n visual_attention_mask=None):\n feat_seq, pooled_output = self.bert(input_ids, token_type_ids, attention_mask,\n visual_feats=visual_feats,\n visual_attention_mask=visual_attention_mask)\n if 'x' == self.mode:\n return pooled_output\n elif 'x' in self.mode and ('l' in self.mode or 'r' in self.mode):\n return feat_seq, pooled_output\n elif 'l' in self.mode or 'r' in self.mode:\n return feat_seq\n\n" ]
[ [ "torch.nn.Linear", "torch.cat", "torch.stack", "torch.nn.SmoothL1Loss", "torch.cuda.is_available", "torch.nn.CrossEntropyLoss", "torch.sigmoid", "torch.nn.Softmax", "tensorflow.train.list_variables", "numpy.transpose", "torch.zeros_like", "tensorflow.train.load_variable", "torch.nn.Tanh", "torch.matmul", "torch.nn.Dropout", "torch.arange", "torch.from_numpy", "torch.ones_like", "torch.nn.Embedding" ] ]
coletamburri2112/flare_imp_study
[ "8f1480104f5874698cfc0881d43cb8872a1115b9" ]
[ "mid1_process.py" ]
[ "# !/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Mar 9 12:15:45 2022\n\n@author: owner\n\"\"\"\nimport fl_funcs\nfrom fl_funcs import exponential\nfrom fl_funcs import exponential_neg\nimport numpy as np\n\nyear = 2013\nmo = 5\nday = 16\nsthr = 21\nstmin = 36\narnum = 11748\nxclnum = 1.3\nxcl = 'M'\nflnum = 1242\ninstrument = 'n5'\ndaystr = '16'\nmostr = 'may'\nyearstr = '2013'\n\nbestflarefile = \"/Users/owner/Desktop/CU_Research/MAT_SOURCE/bestperf_more.mat\"\n\n\nprint(\"Loading the data...\")\n\nsav_data_aia, sav_data, best304, start304, peak304, end304, eventindices,\\\n times304, curves304, aia_cumul8, aia_step8, last_cumul8, hmi_dat,\\\n last_mask = fl_funcs.load_variables(bestflarefile, year, mo, day, sthr,\n stmin, arnum, xclnum, xcl)\n\nX, Y, conv_f, xarr_Mm, yarr_Mm = fl_funcs.conv_facts()\n\nprint(\"Data loaded! Now just some masking and spur removal.\")\n\nhmi_cumul_mask1, hmi_step_mask1, hmi_pos_mask_c, hmi_neg_mask_c \\\n = fl_funcs.pos_neg_masking(aia_cumul8, aia_step8, hmi_dat, last_mask)\n\nneg_rem, pos_rem = fl_funcs.spur_removal_sep(hmi_neg_mask_c, hmi_pos_mask_c,\n pos_crit=2, neg_crit=2,\n jhi=475, jhi2=490)\n\nprint(\"Convolving the HMI images and making the PIL mask.\")\n\nhmi_con_pos_c, hmi_con_neg_c, pil_mask_c = fl_funcs.gauss_conv(pos_rem,\n neg_rem,\n sigma=4)\n\npil_mask_c, ivs, dvs, hmik = fl_funcs.pil_gen(pil_mask_c, hmi_dat)\n\nprint(\"Separation values determination.\")\n\naia8_pos, aia8_neg = fl_funcs.mask_sep(aia_step8, hmi_dat)\n\npos_rem0, neg_rem0 = fl_funcs.spur_removal_sep2(aia8_pos, aia8_neg)\n\ndistpos_med, distpos_mean, distneg_med, distpos_mean \\\n = fl_funcs.separation(aia_step8, ivs, dvs, pos_rem0, neg_rem0)\n\nprint(\"Elongation values determination.\")\n\naia8_pos_2, aia8_neg_2 = fl_funcs.mask_elon(aia_cumul8, hmi_dat)\n\nneg_rem1, pos_rem1 = fl_funcs.spur_removal_elon(aia8_pos_2, aia8_neg_2)\n\nivs_lim, dvs_lim, med_x, med_y = fl_funcs.lim_pil(ivs, dvs)\n\nylim0_neg = 200\nylim1_neg = 600\nylim0_pos = 200\nylim1_pos = 600\nxlim0_neg = 200\nxlim1_neg = 600\nxlim0_pos = 200\nxlim1_pos = 600\n\naia_pos_rem, aia_neg_rem = fl_funcs.rib_lim_elon(aia8_pos_2, aia8_neg_2,\n pos_rem1, neg_rem1, med_x,\n med_y, ylim0_pos, ylim1_pos,\n ylim0_neg, ylim1_neg,\n xlim0_pos, xlim1_pos,\n xlim0_neg, xlim1_neg)\n\nrib_pos_1, rib_pos_2, rib_neg_1, rib_neg_2 = fl_funcs.split_rib(aia_pos_rem,\n aia_neg_rem,\n 380, 400)\n\nlr_coord_neg_1, lr_coord_pos_1 = fl_funcs.find_rib_coordinates(rib_pos_1,\n rib_neg_1)\n\nlr_coord_neg_2, lr_coord_pos_2 = fl_funcs.find_rib_coordinates(rib_pos_2,\n rib_neg_2)\n\nivs_sort, dvs_sort, sortedpil = fl_funcs.sort_pil(ivs_lim, dvs_lim)\n\npil_right_near_pos_1, pil_left_near_pos_1, pil_right_near_neg_1,\\\n pil_left_near_neg_1 = fl_funcs.elon_dist_arrays(lr_coord_pos_1,\n lr_coord_neg_1, ivs_lim,\n dvs_lim, ivs_sort,\n dvs_sort)\n\npil_right_near_pos_2, pil_left_near_pos_2, pil_right_near_neg_2,\\\n pil_left_near_neg_2 = fl_funcs.elon_dist_arrays(lr_coord_pos_2,\n lr_coord_neg_2,\n ivs_lim, dvs_lim, ivs_sort,\n dvs_sort)\n\nlens_pos_1, lens_neg_1 = fl_funcs.elongation(pil_right_near_pos_1,\n pil_left_near_pos_1,\n pil_right_near_neg_1,\n pil_left_near_neg_1,\n sortedpil)\n\nlens_pos_2, lens_neg_2 = fl_funcs.elongation(pil_right_near_pos_2,\n pil_left_near_pos_2,\n pil_right_near_neg_2,\n pil_left_near_neg_2,\n sortedpil)\n\nlens_pos = [sum(x) for x in zip(lens_pos_1, lens_pos_2)]\nlens_neg = [sum(x) for x in zip(lens_neg_1, lens_neg_2)]\n\ndist_pos = distpos_med\ndist_neg = 
distneg_med\n\nprint(\"Converting separation and elongation to Mm.\")\n\nlens_pos_Mm, lens_neg_Mm, distpos_Mm, distneg_Mm, dneg_len, dpos_len, \\\n dneg_dist, dpos_dist = fl_funcs.convert_to_Mm(lens_pos, dist_pos, lens_neg,\n dist_neg, conv_f)\n\nprint(\"Loading parameters for 304 and 1600 Angstrom light curves.\")\n\nstartin, peakin, endin, times, s304, e304, filter_304, med304, std304, \\\n timelab, aiadat, nt, dn1600, time304, times1600 \\\n = fl_funcs.prep_304_1600_parameters(sav_data_aia, sav_data, eventindices,\n flnum, start304, peak304, end304,\n times304, curves304, outflag=1242)\n\nposrib, negrib, pos1600, neg1600 = fl_funcs.img_mask(aia8_pos, aia8_neg,\n aiadat, nt)\n\nprint(\"Determining the regions of separation and elongation.\")\n\nelonperiod_start_pos, elonperiod_end_pos, elonperiod_start_neg, \\\n elonperiod_end_neg = fl_funcs.elon_periods(dpos_len, dneg_len)\n\nsepperiod_start_pos, sepperiod_end_pos, sepperiod_start_neg, \\\n sepperiod_end_neg = fl_funcs.sep_periods(dpos_dist, dneg_dist, start=1)\n\ndt1600, dt304 = fl_funcs.prep_times(dn1600, time304)\n\nprint(\"Plotting ribbon masks.\")\n\nfl_funcs.mask_plotting(X, Y, pos_rem, neg_rem, xarr_Mm, yarr_Mm, flnum)\n\nprint(\"Plotting convolution masks.\")\n\nfl_funcs.convolution_mask_plotting(X, Y, hmi_con_pos_c, hmi_con_neg_c,\n pil_mask_c, xarr_Mm, yarr_Mm, flnum,\n xlim=[200, 600], ylim=[200, 600])\n\nprint(\"Plotting PIL with representative polynomial.\")\n\nfl_funcs.pil_poly_plot(X, Y, pil_mask_c, hmi_dat, ivs, dvs, conv_f, xarr_Mm,\n yarr_Mm, flnum)\n\nprint(\"Plotting ribbon separation.\")\n\npltstrt = 1\n\nfl_funcs.ribbon_sep_plot(dist_pos, dist_neg, times, flnum, pltstrt, dt1600)\n\nprint(\"Plotting ribbon elongation.\")\n\npltstrt = 1\n\nfl_funcs.ribbon_elon_plot(lens_pos, lens_neg, times, pltstrt, flnum, dt1600)\n\nprint(\"Plotting Elongation with Periods\")\nindstrt = 1\nfl_funcs.elon_period_plot(dpos_len, dneg_len, times, times1600, lens_pos_Mm,\n lens_neg_Mm, flnum, elonperiod_start_neg,\n elonperiod_start_pos, elonperiod_end_neg,\n elonperiod_end_pos, indstart=indstrt)\n\nprint(\"Plotting Separation with Periods\")\n\nindstrt = 1\nfl_funcs.sep_period_plot(dpos_dist, dneg_dist, times, distpos_Mm, distneg_Mm,\n flnum, sepperiod_start_pos, sepperiod_end_pos,\n sepperiod_start_neg, sepperiod_end_neg,\n indstrt=indstrt)\n\nprint(\"Processing data for reconnection flux model.\")\n\nhmi, aia8_pos, aia8_neg, aia8_inst_pos, aia8_inst_neg, peak_pos, \\\n peak_neg = fl_funcs.flux_rec_mod_process(\n sav_data, dt1600, pos1600, neg1600)\n\nprint(\"Load fluxes and pixel counts.\")\n\nrec_flux_pos, rec_flux_neg, pos_pix, neg_pix, pos_area_pix, neg_area_pix, ds2,\\\n pos_area, neg_area = fl_funcs.cumul_flux_process(aia8_pos, aia8_neg,\n conv_f, flnum, peak_pos,\n peak_neg, hmi, dt1600)\n\nprint(\"The same, for instantaneous flux.\")\n\nrec_flux_pos_inst, rec_flux_neg_inst, pos_pix_inst, neg_pix_inst, \\\n ds2 = fl_funcs.inst_flux_process(aia8_inst_pos, aia8_inst_neg, flnum,\n conv_f, hmi, dt1600, peak_pos, peak_neg)\n\nprint(\"Reconnection Rate Determination, Plotting.\")\n\nrec_rate_pos, rec_rate_neg = fl_funcs.rec_rate(rec_flux_pos, rec_flux_neg,\n dn1600, dt1600, peak_pos,\n peak_neg, flnum)\n\nexp_ind = np.argmax(pos1600)\nexp_ind_area = np.argmax(pos1600)\n\nprint(\"Exponential curve fitting for the fluxes.\")\n\npoptposflx, pcovposflx, poptnegflx, pcovnegflx, \\\n poptpos, poptneg, pcovpos, pcovneg, rise_pos_flx, \\\n rise_neg_flx = fl_funcs.exp_curve_fit(exp_ind, exp_ind_area, pos_pix,\n neg_pix, exponential,\n 
exponential_neg, pos_area,\n neg_area)\n\nprint(\"Exponential curve plot.\")\n\nfl_funcs.exp_curve_plt(dt1600, rec_flux_pos, rec_flux_neg, rise_pos_flx,\n rise_neg_flx, peak_pos, peak_neg, exp_ind, ds2,\n exponential, exponential_neg, poptposflx, poptnegflx,\n flnum)\n\nprint(\"Ribbon Area Plot\")\n\nfl_funcs.rib_area_plt(dt1600, poptpos, poptneg, flnum, pos_area_pix,\n neg_area_pix, peak_pos, peak_neg, exp_ind)\n\nprint(\"Begin determination of shear.\")\n\n# Establish limits for ribbons corresponding to shear code.\nnegylow = ylim0_neg\nnegyhi = ylim1_neg\nnegxlow = xlim0_neg\nnegxhi = xlim1_neg\n\nposylow = ylim0_pos\nposyhi = ylim1_pos\nposxlow = xlim0_pos\nposxhi = xlim1_pos\n\n# Isolate ribbons appropriately for shear analysis\naia_neg_rem_shear, aia_pos_rem_shear = fl_funcs.\\\n shear_ribbon_isolation(aia8_neg, aia8_pos, med_x, med_y, negylow=negylow,\n negyhi=negyhi, posylow=posylow, posyhi=posyhi,\n negxlow=negxlow, negxhi=negxhi, posxlow=posxlow,\n posxhi=posxhi)\n\n# Left and right coordinates of positive and negative ribbons\nlr_coord_neg_shear, lr_coord_pos_shear = \\\n fl_funcs.leftrightshear(aia_pos_rem_shear, aia_neg_rem_shear)\n\n# PIL pixels closest to the left and right coordinates of positive and negative\n# ribbons\npil_right_near_pos_shear, pil_left_near_pos_shear, pil_right_near_neg_shear,\\\n pil_left_near_neg_shear = fl_funcs.sheardists(lr_coord_pos_shear,\n lr_coord_neg_shear,\n ivs_sort, dvs_sort)\n\n# Guide field to the right and left edges of ribbons\nguide_right, guide_left = fl_funcs.guidefieldlen(pil_right_near_pos_shear,\n pil_left_near_pos_shear,\n pil_right_near_neg_shear,\n pil_left_near_neg_shear,\n sortedpil)\n\n# Guide field ratio to the right and left edges of ribbons\nleft_gfr, right_gfr = fl_funcs.gfrcalc(guide_left, guide_right,\n distneg_med, distpos_med)\n\nprint(\"Plot guide field ratio proxy based on footpoints.\")\n\n# Plot guide field ratio\nfl_funcs.plt_gfr(times, right_gfr, left_gfr, flnum, dt1600)\n\nprint(\"Fermi Processing\")\n\nraw_hxr_sum, cspec_hxr_sum, fermitimes = fl_funcs.process_fermi(daystr, mostr, \n yearstr, \n instrument, \n day, mo, year,\n low=16500,\n high=18000,\n ylo=1e-3,\n yhi=100)\n\n# Figure for timestamp comparison\n\nindstrt_sep = 1\nindstrt_elon = 1\ngfr_trans = 7\n\nfl_funcs.plt_fourpanel(times, right_gfr, left_gfr, flnum, dt1600, time304,\n filter_304, lens_pos_Mm, lens_neg_Mm, distpos_Mm, distneg_Mm,\n dt304, timelab, conv_f,\n elonperiod_start_pos, elonperiod_end_pos,\n elonperiod_start_neg, elonperiod_end_neg,\n sepperiod_start_pos, sepperiod_end_pos,\n sepperiod_start_neg, sepperiod_end_neg, exp_ind,\n s304, e304, pos1600, neg1600, dn1600, indstrt_elon, \n indstrt_sep, fermitimes, raw_hxr_sum, cspec_hxr_sum,\n gfr_trans, low_hxr=16500, high_hxr=18000, period_flag = 0)\n" ]
[ [ "numpy.argmax" ] ]
Semen52/nlp4u
[ "06d30b9d37d2d8d1e1b96d825b91c0731b67ab04" ]
[ "models/src/sentirueval.py" ]
[ "#!/usr/bin/ python\n# -*- coding: utf-8 -*-\n# *************************************** #\n#\n# Author: Semen Budenkov\n# Date: 27/02/2017\n#\n# *************************************** #\n\n\nimport sys\nimport csv\nimport pandas as pd\n\n\ndef load_as_csv(file_name):\n with open(file_name) as opened_file:\n data = csv.reader(opened_file, delimiter='\\t')\n\n return data\n\n\ndef load_as_df(file_name):\n print(\"Open file: {}\".format(file_name))\n with open(file_name, encoding='utf-8') as opened_file:\n data = pd.read_csv(opened_file,\n sep='\\t',\n header=0)\n\n print(\"Number of sentences: {}\".format(len(data['text'])))\n labels = data['sentiment']\n text = data['text']\n\n return labels, text\n\n\ndef main(args=None):\n print(\"This module provides functions for interacting with SentiRuEval set.\")\n\n\nif __name__ == '__main__':\n main()\n" ]
[ [ "pandas.read_csv" ] ]
AnthonyQuantum/open_model_zoo
[ "7d235755e2d17f6186b11243a169966e4f05385a", "7d235755e2d17f6186b11243a169966e4f05385a" ]
[ "tools/accuracy_checker/accuracy_checker/annotation_converters/cvat_human_pose.py", "demos/python_demos/handwritten_japanese_recognition_demo/utils/codec.py" ]
[ "\"\"\"\nCopyright (c) 2019 Intel Corporation\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\n\nimport numpy as np\nfrom .format_converter import FileBasedAnnotationConverter, ConverterReturn\nfrom ..representation import PoseEstimationAnnotation\nfrom ..utils import read_xml, check_file_existence\nfrom ..config import PathField\n\nLABELS_TO_COCO = {\n 'nose': 0,\n 'r_shoulder': 6,\n 'r_elbow': 8,\n 'r_wrist': 9,\n 'l_shoulder': 5,\n 'l_elbow': 7,\n 'l_wrist': 10,\n 'r_hip': 12,\n 'r_knee': 14,\n 'r_ankle': 16,\n 'l_hip': 11,\n 'l_knee': 13,\n 'l_ankle': 15,\n 'r_eye': 2,\n 'l_eye': 1,\n 'r_ear': 3,\n 'l_ear': 4\n}\n\n\nclass CVATPoseEstimationConverter(FileBasedAnnotationConverter):\n __provider__ = 'cvat_pose_estimation'\n annotation_types = (PoseEstimationAnnotation, )\n\n @classmethod\n def parameters(cls):\n configuration_parameters = super().parameters()\n configuration_parameters.update({\n 'images_dir': PathField(\n is_directory=True, optional=True,\n description='path to dataset images, used only for content existence check'\n )\n })\n return configuration_parameters\n\n def configure(self):\n super().configure()\n self.images_dir = self.get_value_from_config('images_dir') or self.annotation_file.parent\n\n def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):\n annotation = read_xml(self.annotation_file)\n meta = annotation.find('meta')\n size = int(meta.find('task').find('size').text)\n annotations = []\n content_errors = None if not check_content else []\n for image_id, image in enumerate(annotation.iter('image')):\n identifier = image.attrib['name'].split('/')[-1]\n if check_content:\n if not check_file_existence(self.images_dir / identifier):\n content_errors.append('{}: does not exist'.format(self.images_dir / identifier))\n label = [1]\n x_vals, y_vals = np.zeros((1, len(LABELS_TO_COCO))), np.zeros((1, len(LABELS_TO_COCO)))\n visilibity = np.zeros((1, len(LABELS_TO_COCO)))\n for point in image.iter('points'):\n point_label = point.attrib['label']\n if point_label not in LABELS_TO_COCO:\n continue\n point_id = LABELS_TO_COCO[point_label]\n point_x, point_y = point.attrib['points'].split(',')\n x_vals[0, point_id] = float(point_x)\n y_vals[0, point_id] = float(point_y)\n if int(point.attrib['occluded']):\n continue\n visilibity[0, point_id] = 2\n annotations.append(PoseEstimationAnnotation(identifier, x_vals, y_vals, visilibity, label))\n\n if progress_callback is not None and image_id % progress_interval == 0:\n progress_callback(image_id * 100 / size)\n meta = {'label_map': {1: 'person'}}\n\n return ConverterReturn(annotations, meta, content_errors)\n\n @staticmethod\n def get_pose(image_annotation, num_landmarks):\n landmarks_x, landmarks_y = np.zeros(num_landmarks), np.zeros(num_landmarks)\n for point in image_annotation:\n idx = int(point.attrib['label'])\n x, y = point.attrib['points'].split(',')\n landmarks_x[idx] = float(x)\n landmarks_y[idx] = float(y)\n\n return landmarks_x, landmarks_y\n", "import numpy as np\n\n\nclass 
CTCCodec(object):\n    \"\"\" Convert between text-label and text-index \"\"\"\n    def __init__(self, characters):\n        # characters (str): set of the possible characters.\n        dict_character = list(characters)\n\n        self.dict = {}\n        for i, char in enumerate(dict_character):\n            # NOTE: 0 is reserved for the 'blank' token required by CTCLoss\n            self.dict[char] = i + 1\n\n        # dummy '[blank]' token for CTCLoss (index 0)\n        self.characters = ['[blank]'] + dict_character\n\n    def decode(self, preds):\n        \"\"\" Convert text-index into text-label. \"\"\"\n        texts = []\n        index = 0\n        # Select max probability (greedy decoding), then decode index to character\n        preds_index = np.argmax(preds, 2)\n        preds_index = preds_index.transpose(1, 0)\n        preds_index_reshape = preds_index.reshape(-1)\n        preds_sizes = np.array([preds_index.shape[1]] * preds_index.shape[0])\n\n        for size in preds_sizes:\n            t = preds_index_reshape[index:index + size]\n\n            # NOTE: t might be zero size\n            if t.shape[0] == 0:\n                continue\n\n            char_list = []\n            for i in range(size):\n                # remove repeated characters and blanks\n                if t[i] != 0 and (not (i > 0 and t[i - 1] == t[i])):\n                    char_list.append(self.characters[t[i]])\n            text = ''.join(char_list)\n            texts.append(text)\n\n            index += size\n\n        return texts\n" ]
[ [ "numpy.zeros" ], [ "numpy.array", "numpy.argmax" ] ]
paarriagadap/Datos-COVID19
[ "055efdc391c4ba93db2a7b5773ddd23244697d96" ]
[ "src/UDD.py" ]
[ "'''\nMIT License\n\nCopyright (c) 2020 Sebastian Cornejo\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n'''\nimport csv\n\n\"\"\"\nLos productos que salen de la contribucion de la UDD son:\n34\n\"\"\"\n\nimport pandas as pd\nimport glob\nfrom utils import *\nimport numpy as np\nfrom datetime import datetime\n\n\ndef prod33(fte,prod):\n\n df_new = pd.DataFrame()\n\n i = 0\n for file in glob.glob(fte + 'indicadores_IM_*_*.csv'):\n print('Processing ' + file)\n temp = pd.read_csv(file, sep=\";\", encoding=\"utf-8\", decimal=\".\")\n\n if i == 0:\n\n dateMissing = pd.read_csv('../input/UDD/indicadores_IM_20200429.csv', sep=\";\", encoding=\"utf-8\", decimal=\".\")\n df_new = pd.concat([temp,dateMissing], axis=0)\n df_new.sort_values(by=['date'], inplace=True)\n #de_new.reset_index(inplace = True)\n\n else:\n df_new = pd.concat([df_new,temp],axis=0)\n\n i += 1\n\n \n df_new.rename(columns={'date': 'Fecha', 'comuna': 'Comuna'}, inplace=True)\n\n df_new['Fecha'] = pd.to_datetime(df_new['Fecha'], format='%Y%m%d').dt.strftime(\"%Y-%m-%d\")\n\n df_new = df_new.drop(columns='region')\n\n data = []\n for file in glob.glob(fte + '/*_indicadores_IM.csv'):\n print('Processing ' + file)\n df_old = pd.read_csv(file, sep=\",\", encoding=\"utf-8\", decimal=\",\")\n\n # standardize column names\n df_old.rename(columns={'date': 'Fecha', 'comuna': 'Comuna'}, inplace=True)\n\n # hay 4 comunas perdidas 5502, 5703, 11302 12202\n # 5502, 5703 listas\n # 11302: O'Higgins no esta\n # 122012: Antartica no esta\n data.append(df_old)\n\n df_old = pd.concat(data)\n\n frames = [df_old,df_new]\n df = pd.concat(frames, axis=0, sort=False)\n df.dropna(how='any', inplace=True)\n\n df = normalizaNombreCodigoRegionYComuna(df)\n df = insertSuperficiePoblacion(df)\n #df.dropna(how='any', inplace=True)\n\n df.drop_duplicates(inplace=True)\n\n #Ordenamos las columnas\n columns = ['Region', 'Codigo region', 'Comuna', 'Codigo comuna', 'Superficie_km2', 'Poblacion',\n 'IM_interno', 'IM_externo', 'IM', 'Fecha']\n df = df[columns]\n df.to_csv(prod + '.csv', index=False)\n\n #try to build a single df with three rows per date per comuna\n aux = df.melt(id_vars=['Region', 'Codigo region', 'Comuna', 'Codigo comuna', 'Superficie_km2', 'Poblacion', 'Fecha'],\n value_vars=['IM_interno', 'IM_externo', 'IM'])\n\n aux.to_csv(prod + '_std.csv', index=False)\n\n #IM_interno,IM_externo,IM,\n IMs = ['IM', 'IM_interno', 'IM_externo']\n for eachIM in IMs:\n columnsToDrop = [x for x in IMs if x != eachIM]\n df_aux = df.drop(columns=columnsToDrop)\n\n reshaped = 
pd.pivot_table(df_aux,\n index=['Region', 'Codigo region', 'Comuna', 'Codigo comuna', 'Superficie_km2', 'Poblacion'],\n columns=['Fecha'],\n values=eachIM)\n\n reshaped.fillna(0, inplace=True)\n #reshaped = reshaped.applymap(np.int64)\n reshaped.to_csv(prod + '-' + eachIM + '.csv')\n data_t = reshaped.transpose()\n data_t.index.rename('', inplace=True)\n data_t.to_csv(prod + '-' + eachIM + '_T.csv')\n\n\n\nif __name__ == '__main__':\n print('Generating producto 33')\n prod33('../input/UDD/', '../output/producto33/IndiceDeMovilidad')\n" ]
[ [ "pandas.to_datetime", "pandas.DataFrame", "pandas.concat", "pandas.read_csv", "pandas.pivot_table" ] ]
sot/sparkles
[ "f48f00054679cf383f1550956ee3eb44377797ab" ]
[ "sparkles/tests/test_find_er_catalog.py" ]
[ "# Licensed under a 3-clause BSD style license - see LICENSE.rst\nimport os\n\nimport warnings\nfrom proseco import get_aca_catalog\nfrom proseco.tests.test_common import mod_std_info\nimport numpy as np\nfrom Quaternion import Quat\nimport Ska.Sun\nimport agasc\n\n# Do not use the AGASC supplement in testing since mags can change\nos.environ[agasc.SUPPLEMENT_ENABLED_ENV] = 'False'\n\n\nfrom sparkles.find_er_catalog import (\n get_candidate_stars, find_er_catalog, filter_candidate_stars_on_ccd,\n get_guide_counts)\n\n\n# Known tough field: PKS 0023-26 pointing\nATT = Quat([0.20668099834, 0.23164729391, 0.002658888173, 0.9505868852])\nDATE = '2021-09-13'\nT_CCD = -8.0\n\n# Get initial catalog at the PKS 0023-26 attitude. Ignore the penalty limit for\n# this work.\nKWARGS = mod_std_info(att=ATT, t_ccd=T_CCD, date=DATE, n_guide=8,\n n_fid=0, obsid=99999, t_ccd_penalty_limit=999)\nACA = get_aca_catalog(**KWARGS)\nDPITCHES, DYAWS = np.ogrid[-0.01:-3.5:4j, -3.1:3:3j]\nSUN_RA, SUN_DEC = Ska.Sun.position(ACA.date)\nATTS = Ska.Sun.apply_sun_pitch_yaw(ACA.att, pitch=DPITCHES, yaw=DYAWS,\n sun_ra=SUN_RA, sun_dec=SUN_DEC)\n\n\ndef test_get_candidate_and_filter_stars():\n stars = get_candidate_stars(ATT, T_CCD, date=DATE)\n stars = filter_candidate_stars_on_ccd(ATT, stars)\n\n count_9th, count_10th, count_all = get_guide_counts(\n stars['MAG_ACA'][stars['guide_mask']], t_ccd=T_CCD)\n assert np.isclose(count_9th, 2.00, atol=0.01)\n assert np.isclose(count_10th, 2.67, atol=0.01)\n assert np.isclose(count_all, 2.25, atol=0.01)\n\n\nTEST_COLS = ['dpitch', 'dyaw', 'count_9th', 'count_10th', 'count_all',\n 'count_ok', 'n_critical', 'att']\n\n\ndef test_find_er_catalog_minus_2_pitch_bins():\n # Try it all for the bad field near PKS 0023-26\n acar, att_opts = find_er_catalog(ACA, ATTS, alg='pitch_bins')\n # import pprint; pprint.pprint(att_opts[TEST_COLS].pformat_all(), width=100)\n assert acar is att_opts['acar'][8]\n assert att_opts[TEST_COLS].pformat_all() == [\n 'dpitch dyaw count_9th count_10th count_all count_ok n_critical att ',\n '------ ----- --------- ---------- --------- -------- ---------- -----------------',\n ' -0.01 -3.10 4.18 6.00 5.65 True 2 7.67 -25.22 29.3',\n ' -0.01 -0.05 2.00 2.67 2.25 False -- 6.47 -26.05 26.1',\n ' -0.01 3.00 2.62 7.92 5.26 False -- 5.21 -26.82 22.8',\n ' -1.17 -3.10 2.00 9.33 5.92 False -- 8.49 -26.12 29.7',\n ' -1.17 -0.05 0.00 1.23 0.78 False -- 7.23 -27.00 26.4',\n ' -1.17 3.00 0.75 6.87 4.03 False -- 5.91 -27.80 23.1',\n ' -2.34 -3.10 1.89 7.77 5.21 False -- 9.32 -27.02 30.1',\n ' -2.34 -0.05 2.87 8.52 5.97 False -- 8.01 -27.93 26.8',\n ' -2.34 3.00 8.53 13.90 12.67 True 0 6.64 -28.78 23.5',\n ' -3.50 -3.10 2.12 10.01 6.66 False -- 10.16 -27.91 30.4',\n ' -3.50 -0.05 4.87 9.63 7.50 True -- 8.80 -28.86 27.2',\n ' -3.50 3.00 3.60 9.93 6.38 True -- 7.37 -29.75 23.8']\n\n\ndef test_find_er_catalog_minus_2_count_all():\n acar, att_opts = find_er_catalog(ACA, ATTS, alg='count_all')\n # import pprint; pprint.pprint(att_opts[TEST_COLS].pformat_all(), width=100)\n assert acar is att_opts['acar'][8]\n assert att_opts[TEST_COLS].pformat_all() == [\n 'dpitch dyaw count_9th count_10th count_all count_ok n_critical att ',\n '------ ----- --------- ---------- --------- -------- ---------- -----------------',\n ' -0.01 -3.10 4.18 6.00 5.65 True -- 7.67 -25.22 29.3',\n ' -0.01 -0.05 2.00 2.67 2.25 False -- 6.47 -26.05 26.1',\n ' -0.01 3.00 2.62 7.92 5.26 False -- 5.21 -26.82 22.8',\n ' -1.17 -3.10 2.00 9.33 5.92 False -- 8.49 -26.12 29.7',\n ' -1.17 -0.05 0.00 1.23 0.78 False -- 
7.23 -27.00 26.4',\n ' -1.17 3.00 0.75 6.87 4.03 False -- 5.91 -27.80 23.1',\n ' -2.34 -3.10 1.89 7.77 5.21 False -- 9.32 -27.02 30.1',\n ' -2.34 -0.05 2.87 8.52 5.97 False -- 8.01 -27.93 26.8',\n ' -2.34 3.00 8.53 13.90 12.67 True 0 6.64 -28.78 23.5',\n ' -3.50 -3.10 2.12 10.01 6.66 False -- 10.16 -27.91 30.4',\n ' -3.50 -0.05 4.87 9.63 7.50 True -- 8.80 -28.86 27.2',\n ' -3.50 3.00 3.60 9.93 6.38 True -- 7.37 -29.75 23.8']\n\n\ndef test_find_er_catalog_minus_2_input_order():\n acar, att_opts = find_er_catalog(ACA, ATTS, alg='input_order')\n # import pprint; pprint.pprint(att_opts[TEST_COLS].pformat_all(), width=100)\n assert acar is att_opts['acar'][8]\n assert att_opts[TEST_COLS].pformat_all() == [\n 'dpitch dyaw count_9th count_10th count_all count_ok n_critical att ',\n '------ ----- --------- ---------- --------- -------- ---------- -----------------',\n ' -0.01 -3.10 4.18 6.00 5.65 True 2 7.67 -25.22 29.3',\n ' -0.01 -0.05 2.00 2.67 2.25 False -- 6.47 -26.05 26.1',\n ' -0.01 3.00 2.62 7.92 5.26 False -- 5.21 -26.82 22.8',\n ' -1.17 -3.10 2.00 9.33 5.92 False -- 8.49 -26.12 29.7',\n ' -1.17 -0.05 0.00 1.23 0.78 False -- 7.23 -27.00 26.4',\n ' -1.17 3.00 0.75 6.87 4.03 False -- 5.91 -27.80 23.1',\n ' -2.34 -3.10 1.89 7.77 5.21 False -- 9.32 -27.02 30.1',\n ' -2.34 -0.05 2.87 8.52 5.97 False -- 8.01 -27.93 26.8',\n ' -2.34 3.00 8.53 13.90 12.67 True 0 6.64 -28.78 23.5',\n ' -3.50 -3.10 2.12 10.01 6.66 False -- 10.16 -27.91 30.4',\n ' -3.50 -0.05 4.87 9.63 7.50 True -- 8.80 -28.86 27.2',\n ' -3.50 3.00 3.60 9.93 6.38 True -- 7.37 -29.75 23.8']\n\n\ndef test_find_er_catalog_fails():\n \"\"\"Test a catalog that will certainly fail at +10 degC\"\"\"\n kwargs = mod_std_info(att=ATT, t_ccd=+10, date=DATE, n_guide=8,\n n_fid=0, obsid=99999, t_ccd_penalty_limit=999)\n\n with warnings.catch_warnings():\n # Ignore warning about grid_model clipping t_ccd\n warnings.filterwarnings(\"ignore\", module=r'.*star_probs.*')\n aca = get_aca_catalog(**kwargs)\n acar, att_opts = find_er_catalog(aca, ATTS, alg='input_order')\n assert acar is None\n assert not np.any(att_opts['count_ok'])\n" ]
[ [ "numpy.any", "numpy.isclose" ] ]
ShihengDuan/neuralhydrology
[ "1c735ffe3a1152a91f1ba549dcb4fcbd26ba8c41" ]
[ "neuralhydrology/modelzoo/tcnn.py" ]
[ "# definition of temporal neural network. details can be found https://doi.org/10.3389/frwa.2020.00028\n\nfrom typing import Dict\n\nimport torch\nfrom torch import nn\nfrom torch.nn.utils import weight_norm\n\nfrom neuralhydrology.modelzoo.basemodel import BaseModel\nfrom neuralhydrology.modelzoo.inputlayer import InputLayer\n\n\nclass Chomp1d(nn.Module): # causal padding\n def __init__(self, chomp_size):\n super(Chomp1d, self).__init__()\n self.chomp_size = chomp_size\n\n def forward(self, x):\n return x[:, :, :-self.chomp_size].contiguous()\n\n\nclass TemporalBlock(nn.Module):\n def __init__(self, n_inputs, n_outputs, kernal_size, stride, dilation, padding, dropout=0.4):\n super(TemporalBlock, self).__init__()\n self.conv1 = weight_norm(nn.Conv1d(n_inputs, n_outputs, kernal_size, stride=stride,\n padding=padding, dilation=dilation))\n self.chomp1 = Chomp1d(padding)\n self.relu1 = nn.ReLU()\n self.dropout1 = nn.Dropout(dropout)\n\n self.conv2 = weight_norm(nn.Conv1d(n_outputs, n_outputs, kernal_size,\n stride=stride, padding=padding, dilation=dilation))\n self.chomp2 = Chomp1d(padding)\n self.relu2 = nn.ReLU()\n self.dropout2 = nn.Dropout(dropout)\n\n self.net = nn.Sequential(self.conv1, self.chomp1, self.relu1, self.dropout1,\n self.conv2, self.chomp2, self.relu2, self.dropout2)\n self.downsample = nn.Conv1d(n_inputs, n_outputs, 1) if n_inputs != n_outputs else None # res connection\n self.relu = nn.ReLU()\n # self.init_weights()\n\n def forward(self, x):\n out = self.net(x)\n res = x if self.downsample is None else self.downsample(x)\n return self.relu(out + res) # res connection\n\n def init_weights(self):\n self.conv1.weight.data.uniform_(-0.1, 0.1)\n self.conv2.weight.data.uniform_(-0.1, 0.1)\n if self.downsample is not None:\n self.downsample.weight.data.normal_(0, 0.01)\n\n\nclass TCNN(BaseModel):\n # specify submodules of the model that can later be used for finetuning. Names must match class attributes\n module_parts = ['tcnn', 'dense1']\n\n def __init__(self, cfg: Dict):\n super(TCNN, self).__init__(cfg=cfg)\n self.kernal_size = cfg[\"kernal_size\"]\n self.num_levels = cfg[\"num_levels\"]\n self.num_channels = cfg[\"num_channels\"]\n self.dr_rate = 0.4\n self.embedding_net = InputLayer(cfg)\n n_attributes = 0\n if (\"camels_attributes\" in cfg.keys()) and cfg[\"camels_attributes\"]:\n print('input attributes')\n n_attributes += len(cfg[\"camels_attributes\"])\n\n self.input_size = len(cfg[\"dynamic_inputs\"] + cfg.get(\"static_inputs\", [])) + n_attributes\n if cfg[\"use_basin_id_encoding\"]:\n self.input_size += cfg[\"number_of_basins\"]\n\n layers = []\n # num_levels = len(num_channels) # number of blocks. Should be 2-3. 
maybe more?\n\n for i in range(self.num_levels):\n # dilation_size = 2 ** i # dilation rate with layer number\n dilation_size = 6 * (i + 1)\n in_channels = self.input_size if i == 0 else self.num_channels\n out_channels = self.num_channels\n layers += [\n TemporalBlock(in_channels, out_channels, padding=(self.kernal_size - 1) * dilation_size, stride=1,\n dilation=dilation_size,\n dropout=self.dr_rate, kernal_size=self.kernal_size)]\n\n self.tcnn = nn.Sequential(*layers)\n\n self.dropout = nn.Dropout(p=cfg[\"output_dropout\"])\n\n # self.reset_parameters()\n self.dense1 = nn.Linear(self.num_channels * 20, 100)\n self.act = nn.ReLU()\n self.dense2 = nn.Linear(100, 1)\n self.flat = nn.Flatten()\n\n def forward(self, data: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:\n\n x_d = self.embedding_net(data) # [seq_length, batch_size, n_features]\n ## convert to CNN inputs:\n x_d = x_d.transpose(0, 1)\n x_d = x_d.transpose(1, 2) # [batch_size, n_features, seq_length]\n tcnn_out = self.tcnn(input=x_d)\n ## slice:\n tcnn_out = tcnn_out[:, :, -20:]\n\n y_hat = self.dense2(self.dropout(self.act(self.dense1(self.flat(tcnn_out)))))\n\n # y_hat = y_hat.unsqueeze(1)\n pred = {'y_hat': y_hat}\n\n return pred # keep the same form with LSTM's other two outputs\n" ]
[ [ "torch.nn.Linear", "torch.nn.Dropout", "torch.nn.Conv1d", "torch.nn.Sequential", "torch.nn.ReLU", "torch.nn.Flatten" ] ]
CyberZHG/keras-conv-vis
[ "5e342af591b7a9cec11b300fecbe601e947d432e" ]
[ "tests/test_get_gradient.py" ]
[ "from unittest import TestCase\n\nimport numpy as np\n\nfrom keras_conv_vis import (get_gradient, Categorical, replace_layers,\n split_model_by_layer, grad_cam)\nfrom keras_conv_vis.backend import keras, TF_KERAS\n\n\nclass TestGetGradient(TestCase):\n\n def test_get_gradient(self):\n if TF_KERAS:\n model = keras.applications.MobileNetV2()\n gradient_model = keras.models.Sequential()\n gradient_model.add(model)\n gradient_model.add(Categorical(7))\n gradient_model.get_config()\n get_gradient(gradient_model, np.random.random((1, 224, 224, 3)))\n get_gradient(gradient_model, np.random.random((1, 224, 224, 3)),\n targets=model.get_layer('bn_Conv1').trainable_weights[0])\n\n def test_cut_model(self):\n if TF_KERAS:\n model = keras.applications.MobileNetV2()\n head, tail = split_model_by_layer(model, 'block_5_add')\n gradient_model = keras.models.Sequential()\n gradient_model.add(tail)\n gradient_model.add(Categorical(7))\n gradients = get_gradient([head, gradient_model], np.random.random((1, 224, 224, 3)))\n self.assertEqual(2, len(gradients))\n\n def test_grad_cam(self):\n if TF_KERAS:\n model = keras.applications.MobileNetV2()\n cam = grad_cam(model,\n layer_cut='Conv_1',\n inputs=np.random.random((3, 224, 224, 3)),\n target_class=0)\n self.assertEqual((3, 7, 7), cam.shape)\n\n def test_grad_cam_pp(self):\n if TF_KERAS:\n model = keras.applications.MobileNetV2()\n model = replace_layers(model, activation_mapping={'softmax': 'linear'})\n cam = grad_cam(model,\n layer_cut='Conv_1',\n inputs=np.random.random((3, 224, 224, 3)),\n target_class=0,\n plus=True)\n self.assertEqual((3, 7, 7), cam.shape)\n" ]
[ [ "numpy.random.random" ] ]
lsraei20/twitoff
[ "0fe36df4b45b6858fd7467f7a6ebdef9926310fd" ]
[ "twitoff/predict.py" ]
[ "\"\"\"Prediction of Users based on tweets embeddings.\"\"\"\nimport numpy as np\nfrom sklearn.linear_model import LogisticRegression\nfrom .models import User\nfrom .twitter import BASILICA\n\n\ndef predict_user(user1_name, user2_name, tweet_text):\n \"\"\"\n Determine and return which user is more likely to say a given Tweet.\n Example run: predict_user('austen', 'elonmusk', 'Lambda School rocks!')\n Returns 1 (corresponding to first user passed in) or 0 (second).\n \"\"\"\n user1 = User.query.filter(User.name == user1_name).one()\n user2 = User.query.filter(User.name == user2_name).one()\n user1_embeddings = np.array([tweet.embedding for tweet in user1.tweets])\n user2_embeddings = np.array([tweet.embedding for tweet in user2.tweets])\n embeddings = np.vstack([user1_embeddings, user2_embeddings])\n labels = np.concatenate([np.ones(len(user1.tweets)),\n np.zeros(len(user2.tweets))])\n log_reg = LogisticRegression().fit(embeddings, labels)\n # We've done our data science! Now to predict\n tweet_embedding = BASILICA.embed_sentence(tweet_text, model='twitter')\n return log_reg.predict(np.array(tweet_embedding).reshape(1, -1))\n\n" ]
[ [ "sklearn.linear_model.LogisticRegression", "numpy.array", "numpy.vstack" ] ]
ChristianDjurhuus/MLOpsExercise
[ "0ab95482cb29fb58f55046ccdcbfe9b1784a1048" ]
[ "src/models/predict_model.py" ]
[ "# -*- coding: utf-8 -*-\nimport logging\nfrom pathlib import Path\n\nimport click\nimport numpy as np\nimport torch\nfrom dotenv import find_dotenv, load_dotenv\nfrom model import MyAwesomeModel\n\n\[email protected]()\[email protected](\"model_name\", type=click.Path())\ndef evaluate(model_name):\n print(\"Evaluating until hitting the ceiling\")\n\n model = MyAwesomeModel()\n state_dict = torch.load(\"models/\" + model_name)\n model.load_state_dict(state_dict)\n test_set = torch.load(\"data/processed/testloader.pth\")\n\n model.eval()\n accuracies = []\n with torch.no_grad():\n for images, labels in test_set:\n images = images.unsqueeze(1)\n # images = images.view(images.shape[0], -1)\n ps = model(images)\n # ps = torch.exp(log_ps)\n top_p, top_class = ps.topk(1, dim=1)\n equals = top_class == labels.view(*top_class.shape)\n accuracy = torch.mean(equals.type(torch.FloatTensor))\n # print(f'Accuracy: {accuracy.item() * 100}%')\n accuracies.append(accuracy)\n print(\"Estimate of accuracy: \", np.mean(accuracies))\n\n\nif __name__ == \"__main__\":\n log_fmt = \"%(asctime)s - %(name)s - %(levelname)s - %(message)s\"\n logging.basicConfig(level=logging.INFO, format=log_fmt)\n\n # not used in this stub but often useful for finding various files\n project_dir = Path(__file__).resolve().parents[2]\n\n # find .env automagically by walking up directories until it's found, then\n # load up the .env entries as environment variables\n load_dotenv(find_dotenv())\n\n evaluate()\n" ]
[ [ "torch.no_grad", "numpy.mean", "torch.load" ] ]
Camerash/cs231n
[ "5537cc79715fbeb26611b46236f6684bd65c09a7" ]
[ "Assignment2/cs231n/classifiers/cnn.py" ]
[ "from builtins import object\nimport numpy as np\n\nfrom cs231n.layers import *\nfrom cs231n.fast_layers import *\nfrom cs231n.layer_utils import *\n\n\nclass ThreeLayerConvNet(object):\n \"\"\"\n A three-layer convolutional network with the following architecture:\n\n conv - relu - 2x2 max pool - affine - relu - affine - softmax\n\n The network operates on minibatches of data that have shape (N, C, H, W)\n consisting of N images, each with height H and width W and with C input\n channels.\n \"\"\"\n\n def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7,\n hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32):\n \"\"\"\n Initialize a new network.\n\n Inputs:\n - input_dim: Tuple (C, H, W) giving size of input data\n - num_filters: Number of filters to use in the convolutional layer\n - filter_size: Size of filters to use in the convolutional layer\n - hidden_dim: Number of units to use in the fully-connected hidden layer\n - num_classes: Number of scores to produce from the final affine layer.\n - weight_scale: Scalar giving standard deviation for random initialization\n of weights.\n - reg: Scalar giving L2 regularization strength\n - dtype: numpy datatype to use for computation.\n \"\"\"\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n\n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. #\n # Store weights and biases for the convolutional layer using the keys 'W1' #\n # and 'b1'; use keys 'W2' and 'b2' for the weights and biases of the #\n # hidden affine layer, and keys 'W3' and 'b3' for the weights and biases #\n # of the output affine layer. 
#\n ############################################################################\n self.params['W1'] = np.random.normal(scale=weight_scale, size=(num_filters, input_dim[0], filter_size, filter_size))\n self.params['b1'] = np.zeros(num_filters)\n\n self.params['W2'] = np.random.normal(scale=weight_scale, size=(input_dim[1]*input_dim[2]*num_filters//4, hidden_dim))\n self.params['b2'] = np.zeros(hidden_dim)\n\n self.params['W3'] = np.random.normal(scale=weight_scale, size=(hidden_dim, num_classes))\n self.params['b3'] = np.zeros(num_classes)\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.items():\n self.params[k] = v.astype(dtype)\n\n\n def loss(self, X, y=None):\n \"\"\"\n Evaluate loss and gradient for the three-layer convolutional network.\n\n Input / output: Same API as TwoLayerNet in fc_net.py.\n \"\"\"\n W1, b1 = self.params['W1'], self.params['b1']\n W2, b2 = self.params['W2'], self.params['b2']\n W3, b3 = self.params['W3'], self.params['b3']\n\n # pass conv_param to the forward pass for the convolutional layer\n filter_size = W1.shape[2]\n conv_param = {'stride': 1, 'pad': (filter_size - 1) // 2}\n\n # pass pool_param to the forward pass for the max-pooling layer\n pool_param = {'pool_height': 2, 'pool_width': 2, 'stride': 2}\n\n scores = None\n ############################################################################\n # TODO: Implement the forward pass for the three-layer convolutional net, #\n # computing the class scores for X and storing them in the scores #\n # variable. #\n ############################################################################\n out1, cache1 = conv_relu_pool_forward(X, W1, b1, conv_param, pool_param)\n out2, cache2 = affine_relu_forward(out1, W2, b2)\n scores, cache3 = affine_forward(out2, W3, b3)\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n if y is None:\n return scores\n\n loss, grads = 0, {}\n ############################################################################\n # TODO: Implement the backward pass for the three-layer convolutional net, #\n # storing the loss and gradients in the loss and grads variables. Compute #\n # data loss using softmax, and make sure that grads[k] holds the gradients #\n # for self.params[k]. Don't forget to add L2 regularization! #\n ############################################################################\n loss, dScores = softmax_loss(scores, y)\n loss += 0.5 * self.reg * (np.sum(np.square(W1)) + np.sum(np.square(W2)) + (np.sum(np.square(W3))))\n \n dAffine3, grads['W3'], grads['b3'] = affine_backward(dScores, cache3)\n dAffine2, grads['W2'], grads['b2'] = affine_relu_backward(dAffine3, cache2)\n _, grads['W1'], grads['b1'] = conv_relu_pool_backward(dAffine2, cache1)\n\n grads['W3'] += self.reg * W3\n grads['W2'] += self.reg * W2\n grads['W1'] += self.reg * W1\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n return loss, grads\n" ]
[ [ "numpy.square", "numpy.random.normal", "numpy.zeros" ] ]
MMohanram1/VidioPiksel
[ "cfcaccdfcfeefae29310bf8b290491e93a8d8158" ]
[ "AudioHash/fingerprint.py" ]
[ "import numpy as np\nimport matplotlib.mlab as mlab\nimport matplotlib.pyplot as plt\nfrom scipy.ndimage.filters import maximum_filter\nfrom scipy.ndimage.morphology import (generate_binary_structure,\n iterate_structure, binary_erosion)\nimport hashlib\nfrom operator import itemgetter\n\nIDX_FREQ_I = 0\nIDX_TIME_J = 1\n\n######################################################################\n# Sampling rate, related to the Nyquist conditions, which affects\n# the range frequencies we can detect.\nDEFAULT_FS = 44100\n\n######################################################################\n# Size of the FFT window, affects frequency granularity\nDEFAULT_WINDOW_SIZE = 4096\n\n######################################################################\n# Ratio by which each sequential window overlaps the last and the\n# next window. Higher overlap will allow a higher granularity of offset\n# matching, but potentially more fingerprints.\nDEFAULT_OVERLAP_RATIO = 0.5\n\n######################################################################\n# Degree to which a fingerprint can be paired with its neighbors --\n# higher will cause more fingerprints, but potentially better accuracy.\nDEFAULT_FAN_VALUE = 15\n\n######################################################################\n# Minimum amplitude in spectrogram in order to be considered a peak.\n# This can be raised to reduce number of fingerprints, but can negatively\n# affect accuracy.\nDEFAULT_AMP_MIN = 10\n\n######################################################################\n# Number of cells around an amplitude peak in the spectrogram in order\n# for AudioHash to consider it a spectral peak. Higher values mean less\n# fingerprints and faster matching, but can potentially affect accuracy.\nPEAK_NEIGHBORHOOD_SIZE = 20\n\n######################################################################\n# Thresholds on how close or far fingerprints can be in time in order\n# to be paired as a fingerprint. If your max is too low, higher values of\n# DEFAULT_FAN_VALUE may not perform as expected.\nMIN_HASH_TIME_DELTA = 0\nMAX_HASH_TIME_DELTA = 200\n\n######################################################################\n# If True, will sort peaks temporally for fingerprinting;\n# not sorting will cut down number of fingerprints, but potentially\n# affect performance.\nPEAK_SORT = True\n\n######################################################################\n# Number of bits to throw away from the front of the SHA1 hash in the\n# fingerprint calculation. 
The more you throw away, the less storage, but\n# potentially higher collisions and misclassifications when identifying songs.\nFINGERPRINT_REDUCTION = 20\n\ndef fingerprint(channel_samples, Fs=DEFAULT_FS,\n wsize=DEFAULT_WINDOW_SIZE,\n wratio=DEFAULT_OVERLAP_RATIO,\n fan_value=DEFAULT_FAN_VALUE,\n amp_min=DEFAULT_AMP_MIN):\n \"\"\"\n FFT the channel, log transform output, find local maxima, then return\n locally sensitive hashes.\n \"\"\"\n # FFT the signal and extract frequency components\n arr2D = mlab.specgram(\n channel_samples,\n NFFT=wsize,\n Fs=Fs,\n window=mlab.window_hanning,\n noverlap=int(wsize * wratio))[0]\n\n # apply log transform since specgram() returns linear array\n arr2D = 10 * np.log10(arr2D)\n arr2D[arr2D == -np.inf] = 0 # replace infs with zeros\n\n # find local maxima\n local_maxima = get_2D_peaks(arr2D, plot=False, amp_min=amp_min)\n\n # return hashes\n return generate_hashes(local_maxima, fan_value=fan_value)\n\n\ndef get_2D_peaks(arr2D, plot=False, amp_min=DEFAULT_AMP_MIN):\n # http://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.morphology.iterate_structure.html#scipy.ndimage.morphology.iterate_structure\n struct = generate_binary_structure(2, 1)\n neighborhood = iterate_structure(struct, PEAK_NEIGHBORHOOD_SIZE)\n\n # find local maxima using our fliter shape\n local_max = maximum_filter(arr2D, footprint=neighborhood) == arr2D\n background = (arr2D == 0)\n eroded_background = binary_erosion(background, structure=neighborhood,\n border_value=1)\n\n # Boolean mask of arr2D with True at peaks\n detected_peaks = local_max - eroded_background\n\n # extract peaks\n amps = arr2D[detected_peaks]\n j, i = np.where(detected_peaks)\n\n # filter peaks\n amps = amps.flatten()\n peaks = zip(i, j, amps)\n peaks_filtered = [x for x in peaks if x[2] > amp_min] # freq, time, amp\n\n # get indices for frequency and time\n frequency_idx = [x[1] for x in peaks_filtered]\n time_idx = [x[0] for x in peaks_filtered]\n\n if plot:\n # scatter of the peaks\n fig, ax = plt.subplots()\n ax.imshow(arr2D)\n ax.scatter(time_idx, frequency_idx)\n ax.set_xlabel('Time')\n ax.set_ylabel('Frequency')\n ax.set_title(\"Spectrogram\")\n plt.gca().invert_yaxis()\n plt.show()\n\n return zip(frequency_idx, time_idx)\n\n\ndef generate_hashes(peaks, fan_value=DEFAULT_FAN_VALUE):\n \"\"\"\n Hash list structure:\n sha1_hash[0:20] time_offset\n [(e05b341a9b77a51fd26, 32), ... ]\n \"\"\"\n if PEAK_SORT:\n peaks = sorted(peaks, key=itemgetter(1))\n\n for i in range(len(peaks)):\n for j in range(1, fan_value):\n if (i + j) < len(peaks):\n\n freq1 = peaks[i][IDX_FREQ_I]\n freq2 = peaks[i + j][IDX_FREQ_I]\n t1 = peaks[i][IDX_TIME_J]\n t2 = peaks[i + j][IDX_TIME_J]\n t_delta = t2 - t1\n\n if t_delta >= MIN_HASH_TIME_DELTA and t_delta <= MAX_HASH_TIME_DELTA:\n h = hashlib.sha1(\n (\"%s|%s|%s\" % (str(freq1), str(freq2), str(t_delta))).encode('utf-8')\n )\n yield (h.hexdigest()[0:FINGERPRINT_REDUCTION], t1)\n" ]
[ [ "scipy.ndimage.morphology.generate_binary_structure", "matplotlib.pyplot.gca", "matplotlib.pyplot.subplots", "scipy.ndimage.filters.maximum_filter", "numpy.where", "matplotlib.pyplot.show", "scipy.ndimage.morphology.iterate_structure", "numpy.log10", "scipy.ndimage.morphology.binary_erosion" ] ]
mxsg/CMS-Model-Calibration
[ "f7f85e863190f7a7ef0922dca4d0a8f8178e5a9e" ]
[ "cmscalibration/utils/visualization.py" ]
[ "import math\n\nimport matplotlib.pyplot as plt\n\n\ndef draw_binned_data(counts, bins):\n fig, axes = plt.subplots()\n\n draw_binned_data_subplot(counts, bins, axes)\n\n return fig, axes\n\n\ndef draw_binned_data_subplot(counts, bins, axes, name=''):\n centroids = (bins[1:] + bins[:-1]) / 2\n total = sum(counts)\n\n axes.hist(centroids, density=True, bins=bins, weights=counts, histtype='step')\n\n axes.set_xlim(left=0)\n axes.set_ylim(bottom=0)\n\n axes.set_title(\"{} (from {} jobs)\".format(name, total))\n\n return axes\n\n\ndef draw_integer_distribution(values, counts, norm=True, name=''):\n fig, axes = plt.subplots()\n\n axes = draw_integer_distribution_subplot(values, counts, axes, norm, name)\n\n return fig, axes\n\n\ndef draw_integer_distribution_subplot(values, counts, axes, norm=True, name=''):\n total = sum(counts)\n shares = [x / total for x in counts]\n\n # Either draw total number or shares\n bars = shares if norm else counts\n\n axes.bar(values, bars)\n\n axes.set_title(\"{} (from {} jobs)\".format(name, total))\n\n axes.set_xlabel(r\"Number of required slots\")\n if norm:\n axes.set_ylabel(\"Share\")\n axes.set_ylim(0, 1)\n else:\n axes.set_ylabel(\"Number of Jobs\")\n\n return axes\n\n\ndef draw_efficiency_timeseries(series_dict, resample_freq=None):\n fig, axes = plt.subplots()\n\n for name, series in series_dict.items():\n # Resample time series\n if resample_freq is not None:\n series = series.resample(resample_freq).mean()\n\n label = \"{} (average {:.2f}%)\".format(name, series.mean() * 100)\n series.plot.line(ax=axes, label=label)\n\n axes.set_xlabel('')\n axes.legend()\n axes.set_ylim([0, 1])\n\n fig.tight_layout()\n\n return fig, axes\n\n\nclass MultiPlotFigure:\n \"\"\"Objects of this class can be used to iteratively draw multiple subplots into the same figure.\"\"\"\n\n def __init__(self, nplots, ncols=2):\n self.nplots = nplots\n self.ncols = ncols\n self.nrows = math.ceil(nplots / ncols)\n\n self.maxplots = self.nrows * self.ncols\n\n self.i_next_subplot = 0\n self.fig, self.axes_list = plt.subplots(ncols=self.ncols, nrows=self.nrows)\n\n @property\n def current_axis(self):\n return self.axes_list[self.i_next_subplot // self.ncols, self.i_next_subplot % self.ncols]\n\n def finish_subplot(self):\n if self.i_next_subplot >= self.nplots:\n raise ValueError(\n \"Cannot step to next plot with number {}, figure is already full!\".format(self.i_next_subplot + 1))\n else:\n self.i_next_subplot += 1\n\n def add_to_report(self, report, identifier, width=10, height=10):\n\n self.fig.set_size_inches(width, height)\n\n # Remove all plots that were not used\n for i in range(self.i_next_subplot, self.maxplots):\n self.fig.delaxes(self.axes_list[i // self.ncols, i % self.ncols])\n\n # Add plot to report\n report.add_figure(self.fig, self.axes_list, identifier)\n" ]
[ [ "matplotlib.pyplot.subplots" ] ]
NeelayS/pose_tracking
[ "68d151eef0c6a84623348f7be76ece709e6ca57a" ]
[ "AlphaPose/detector/nms/nms_wrapper.py" ]
[ "import numpy as np\nimport torch\n\nfrom . import nms_cpu, nms_cuda\nfrom .soft_nms_cpu import soft_nms_cpu\n\n\ndef nms(dets, iou_thr, device_id=None):\n \"\"\"Dispatch to either CPU or GPU NMS implementations.\n\n The input can be either a torch tensor or numpy array. GPU NMS will be used\n if the input is a gpu tensor or device_id is specified, otherwise CPU NMS\n will be used. The returned type will always be the same as inputs.\n\n Arguments:\n dets (torch.Tensor or np.ndarray): bboxes with scores.\n iou_thr (float): IoU threshold for NMS.\n device_id (int, optional): when `dets` is a numpy array, if `device_id`\n is None, then cpu nms is used, otherwise gpu_nms will be used.\n\n Returns:\n tuple: kept bboxes and indice, which is always the same data type as\n the input.\n \"\"\"\n # convert dets (tensor or numpy array) to tensor\n if isinstance(dets, torch.Tensor):\n is_numpy = False\n dets_th = dets.to(\"cpu\")\n elif isinstance(dets, np.ndarray):\n is_numpy = True\n device = \"cpu\" if device_id is None else \"cuda:{}\".format(device_id)\n dets_th = torch.from_numpy(dets).to(device)\n else:\n raise TypeError(\n \"dets must be either a Tensor or numpy array, but got {}\".format(type(dets))\n )\n\n # execute cpu or cuda nms\n if dets_th.shape[0] == 0:\n inds = dets_th.new_zeros(0, dtype=torch.long)\n else:\n if dets_th.is_cuda:\n inds = nms_cuda.nms(dets_th, iou_thr)\n else:\n inds = nms_cpu.nms(dets_th, iou_thr)\n\n if is_numpy:\n inds = inds.cpu().numpy()\n return dets[inds, :], inds\n\n\ndef soft_nms(dets, iou_thr, method=\"linear\", sigma=0.5, min_score=1e-3):\n if isinstance(dets, torch.Tensor):\n is_tensor = True\n dets_np = dets.detach().cpu().numpy()\n elif isinstance(dets, np.ndarray):\n is_tensor = False\n dets_np = dets\n else:\n raise TypeError(\n \"dets must be either a Tensor or numpy array, but got {}\".format(type(dets))\n )\n\n method_codes = {\"linear\": 1, \"gaussian\": 2}\n if method not in method_codes:\n raise ValueError(\"Invalid method for SoftNMS: {}\".format(method))\n new_dets, inds = soft_nms_cpu(\n dets_np, iou_thr, method=method_codes[method], sigma=sigma, min_score=min_score\n )\n\n if is_tensor:\n return dets.new_tensor(new_dets), dets.new_tensor(inds, dtype=torch.long)\n else:\n return new_dets.astype(np.float32), inds.astype(np.int64)\n" ]
[ [ "torch.from_numpy" ] ]
olutosinbanjo/oneMKL_getrs
[ "072e036498a6fc4a0a801c316ce84925dac8b9d0" ]
[ "plots/heterogeneous_versions.py" ]
[ "\r\n\"\"\"\r\nTITLE: grouped bar plots\r\n@author: Oluwatosin S. Oluseyi , December 11, 2021; modified: February 06, 2022\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\nfig, ax = plt.subplots()\r\n\r\nBUF_het = [1.8723, 3.1941, 12.2682, 83.4657]\r\nUSM_het1 = [1.9384, 3.0731, 10.8887, 69.6758]\r\nUSM_het2 = [0.02036, 1.3692, 10.2501, 79.6202]\r\n\r\n\r\nn = len(BUF_het)\r\nx = np.arange(n)\r\nwidth = 0.2\r\n\r\nplt.bar(x, BUF_het, color = '#04D8B2',\r\n width = width, edgecolor = 'white', label='getrs_buf_het')\r\n\r\nplt.bar(x + width, USM_het1, color = '#15B01A',\r\n width = width, edgecolor = 'white', label ='getrs_usm_het1')\r\n\r\nplt.bar(x + width*2, USM_het2, color = 'khaki',\r\n width = width, edgecolor = 'white', label ='getrs_usm_het2')\r\n\r\n\r\nplt.yscale('log')\r\nplt.ylabel(\"Time (in seconds)\")\r\nplt.xticks(x+width,['500','2500','5000','10000'])\r\nplt.title(\"Heterogeneous Implementations - host + gpu\")\r\n \r\nplt.legend(loc=\"upper left\")\r\n\r\n# BAR ANNOTATION (see https://www.pythoncharts.com/matplotlib/grouped-bar-charts-matplotlib/)\r\nfor bar in ax.patches:\r\n # The text annotation for each bar should be its height.\r\n bar_value = bar.get_height()\r\n # Format text for bars\r\n text = f'{bar_value}'\r\n # This will give the middle of each bar on the x-axis.\r\n text_x = bar.get_x() + bar.get_width() / 2\r\n # get_y() is where the bar starts so we add the height to it.\r\n text_y = bar.get_y() + bar_value\r\n # make text the same color as the bar\r\n bar_color = bar.get_facecolor()\r\n # If you want a consistent color, you can just set it as a constant, e.g. #222222\r\n ax.text(text_x, text_y, text, ha='center', va='bottom', color=bar_color,\r\n size=4)\r\n \r\nplt.show()\r\n" ]
[ [ "matplotlib.pyplot.title", "matplotlib.pyplot.legend", "matplotlib.pyplot.subplots", "numpy.arange", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.show", "matplotlib.pyplot.bar", "matplotlib.pyplot.yscale", "matplotlib.pyplot.xticks" ] ]
archeltaneka/cgp-cnn-PyTorch
[ "667a1e9c6e25a26dd8c3c094944a53bb95f379f0" ]
[ "cnn_train.py" ]
[ "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport time\nimport math\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom torch.nn import init\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torchvision.datasets as dset\nimport torchvision.transforms as transforms\nimport torchvision.utils as vutils\nfrom torch.autograd import Variable\nimport random\nfrom skimage.measure import compare_psnr\nimport os\n\nfrom cnn_model import CGP2CNN\nfrom my_data_loader import get_train_valid_loader, get_test_loader\n\n\ndef weights_init(m):\n classname = m.__class__.__name__\n if classname.find('Conv') != -1:\n m.weight.data.normal_(0.0, 0.02)\n elif classname.find('BatchNorm') != -1:\n m.weight.data.normal_(1.0, 0.02)\n m.bias.data.fill_(0)\n\ndef weights_init_normal(m):\n classname = m.__class__.__name__\n if classname.find('Conv2d') != -1:\n m.apply(weights_init_normal_)\n elif classname.find('Linear') != -1:\n init.uniform(m.weight.data, 0.0, 0.02)\n elif classname.find('BatchNorm2d') != -1:\n init.uniform(m.weight.data, 1.0, 0.02)\n init.constant(m.bias.data, 0.0)\n\ndef weights_init_normal_(m):\n classname = m.__class__.__name__\n if classname.find('Conv') != -1:\n init.uniform(m.weight.data, 0.0, 0.02)\n elif classname.find('Linear') != -1:\n init.uniform(m.weight.data, 0.0, 0.02)\n elif classname.find('BatchNorm2d') != -1:\n init.uniform(m.weight.data, 1.0, 0.02)\n init.constant(m.bias.data, 0.0)\n\ndef weights_init_xavier(m):\n classname = m.__class__.__name__\n if classname.find('Conv') != -1:\n init.xavier_normal(m.weight.data, gain=1)\n elif classname.find('Linear') != -1:\n init.xavier_normal(m.weight.data, gain=1)\n elif classname.find('BatchNorm2d') != -1:\n init.uniform(m.weight.data, 1.0, 0.02)\n init.constant(m.bias.data, 0.0)\n\ndef weights_init_kaiming(m):\n classname = m.__class__.__name__\n if classname.find('Conv2d') != -1:\n init.kaiming_normal(m.weight.data, a=0, mode='fan_in')\n elif classname.find('Linear') != -1:\n init.kaiming_normal(m.weight.data, a=0, mode='fan_in')\n elif classname.find('BatchNorm2d') != -1:\n init.uniform(m.weight.data, 1.0, 0.02)\n init.constant(m.bias.data, 0.0)\n\ndef weights_init_orthogonal(m):\n classname = m.__class__.__name__\n print(classname)\n if classname.find('Conv') != -1:\n init.orthogonal(m.weight.data, gain=1)\n elif classname.find('Linear') != -1:\n init.orthogonal(m.weight.data, gain=1)\n elif classname.find('BatchNorm2d') != -1:\n init.uniform(m.weight.data, 1.0, 0.02)\n init.constant(m.bias.data, 0.0)\n\ndef init_weights(net, init_type='normal'):\n print('initialization method [%s]' % init_type)\n if init_type == 'normal':\n net.apply(weights_init_normal)\n elif init_type == 'xavier':\n net.apply(weights_init_xavier)\n elif init_type == 'kaiming':\n net.apply(weights_init_kaiming)\n elif init_type == 'orthogonal':\n net.apply(weights_init_orthogonal)\n else:\n raise NotImplementedError('initialization method [%s] is not implemented' % init_type)\n\n\n\n# __init__: load dataset\n# __call__: training the CNN defined by CGP list\nclass CNN_train():\n def __init__(self, dataset_name, validation=True, verbose=True, imgSize=32, batchsize=128):\n # dataset_name: name of data set ('bsds'(color) or 'bsds_gray')\n # validation: [True] model train/validation mode\n # [False] model test mode for final evaluation of the evolved model\n # (raining data : all training data, test data : all test data)\n # verbose: flag of display\n self.verbose = 
verbose\n self.imgSize = imgSize\n self.validation = validation\n self.batchsize = batchsize\n self.dataset_name = dataset_name\n\n # load dataset\n if dataset_name == 'cifar10' or dataset_name == 'mnist' or dataset_name == 'coronahack':\n if dataset_name == 'coronahack':\n self.n_class = 2\n self.channel = 3\n if self.validation:\n self.dataloader, self.test_dataloader = get_train_valid_loader(data_dir='./data/', batch_size=self.batchsize, augment=True, random_seed=2018, num_workers=1, pin_memory=True)\n # self.dataloader, self.test_dataloader = loaders[0], loaders[1]\n else:\n train_dataset = dset.CIFAR10(root='./', train=True, download=True,\n transform=transforms.Compose([\n transforms.RandomHorizontalFlip(),\n transforms.Scale(self.imgSize),\n transforms.ToTensor(),\n transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),\n ]))\n test_dataset = dset.CIFAR10(root='./', train=False, download=True,\n transform=transforms.Compose([\n transforms.Scale(self.imgSize),\n transforms.ToTensor(),\n transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),\n ]))\n self.dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=self.batchsize, shuffle=True, num_workers=int(2))\n self.test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=self.batchsize, shuffle=True, num_workers=int(2))\n print('train num ', len(self.dataloader.dataset))\n print('test num ', len(self.test_dataloader.dataset))\n else:\n print('\\tInvalid input dataset name at CNN_train()')\n exit(1)\n\n def __call__(self, cgp, gpuID, epoch_num=200, out_model='mymodel.model'):\n # empty cuda cache\n torch.cuda.empty_cache()\n if self.verbose:\n print('GPUID :', gpuID)\n print('epoch_num :', epoch_num)\n print('batch_size:', self.batchsize)\n \n # model\n torch.backends.cudnn.benchmark = True\n model = CGP2CNN(cgp, self.channel, self.n_class, self.imgSize)\n init_weights(model, 'kaiming')\n model.cuda(gpuID)\n # Loss and Optimizer\n criterion = nn.CrossEntropyLoss()\n criterion.cuda(gpuID)\n optimizer = optim.Adam(model.parameters(), lr=0.01, betas=(0.5, 0.999))\n # optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9, dampening=0, weight_decay=0.0005)\n input = torch.FloatTensor(self.batchsize, self.channel, self.imgSize, self.imgSize)\n input = input.cuda(gpuID)\n label = torch.LongTensor(self.batchsize)\n label = label.cuda(gpuID)\n\n # Train loop\n for epoch in range(1, epoch_num+1):\n start_time = time.time()\n if self.verbose:\n print('epoch', epoch)\n train_loss = 0\n total = 0\n correct = 0\n ite = 0\n for module in model.children():\n module.train(True)\n for _, (data, target) in enumerate(self.dataloader):\n if self.dataset_name == 'mnist':\n data = data[:,0:1,:,:] # for gray scale images\n data = data.cuda(gpuID)\n target = target.cuda(gpuID)\n input.resize_as_(data).copy_(data)\n input_ = Variable(input)\n label.resize_as_(target).copy_(target)\n label_ = Variable(label)\n optimizer.zero_grad()\n try:\n output = model(input_, None)\n except:\n import traceback\n traceback.print_exc()\n return 0.\n loss = criterion(output, label_)\n train_loss += loss.data[0]\n loss.backward()\n optimizer.step()\n _, predicted = torch.max(output.data, 1)\n total += label_.size(0)\n correct += predicted.eq(label_.data).cpu().sum()\n ite += 1\n print('Train set : Average loss: {:.4f}'.format(train_loss))\n print('Train set : Average Acc : {:.4f}'.format(correct/total))\n print('time ', time.time()-start_time)\n if self.validation:\n if epoch == 30:\n for param_group in 
optimizer.param_groups:\n tmp = param_group['lr']\n tmp *= 0.1\n for param_group in optimizer.param_groups:\n param_group['lr'] = tmp\n if epoch == epoch_num:\n for module in model.children():\n module.train(False)\n t_loss = self.__test_per_std(model, criterion, gpuID, input, label)\n else:\n if epoch == 5:\n for param_group in optimizer.param_groups:\n tmp = param_group['lr']\n tmp *= 10\n for param_group in optimizer.param_groups:\n param_group['lr'] = tmp\n if epoch % 10 == 0:\n for module in model.children():\n module.train(False)\n t_loss = self.__test_per_std(model, criterion, gpuID, input, label)\n if epoch == 250:\n for param_group in optimizer.param_groups:\n tmp = param_group['lr']\n tmp *= 0.1\n for param_group in optimizer.param_groups:\n param_group['lr'] = tmp\n if epoch == 375:\n for param_group in optimizer.param_groups:\n tmp = param_group['lr']\n tmp *= 0.1\n for param_group in optimizer.param_groups:\n param_group['lr'] = tmp\n # save the model\n torch.save(model.state_dict(), './model_%d.pth' % int(gpuID))\n return t_loss\n\n # For validation/test\n def __test_per_std(self, model, criterion, gpuID, input, label):\n test_loss = 0\n total = 0\n correct = 0\n ite = 0\n for _, (data, target) in enumerate(self.test_dataloader):\n if self.dataset_name == 'mnsit':\n data = data[:,0:1,:,:]\n data = data.cuda(gpuID)\n target = target.cuda(gpuID)\n input.resize_as_(data).copy_(data)\n input_ = Variable(input)\n label.resize_as_(target).copy_(target)\n label_ = Variable(label)\n try:\n output = model(input_, None)\n except:\n import traceback\n traceback.print_exc()\n return 0.\n loss = criterion(output, label_)\n test_loss += loss.data[0]\n _, predicted = torch.max(output.data, 1)\n total += label_.size(0)\n correct += predicted.eq(label_.data).cpu().sum()\n ite += 1\n print('Test set : Average loss: {:.4f}'.format(test_loss))\n print('Test set : (%d/%d)' % (correct, total))\n print('Test set : Average Acc : {:.4f}'.format(correct/total))\n\n return (correct/total)\n" ]
[ [ "torch.nn.init.orthogonal", "torch.nn.init.constant", "torch.nn.init.xavier_normal", "torch.autograd.Variable", "torch.max", "torch.nn.init.uniform", "torch.FloatTensor", "torch.nn.init.kaiming_normal", "torch.cuda.empty_cache", "torch.LongTensor", "torch.nn.CrossEntropyLoss" ] ]
Hugo101/SPFlow
[ "daaeed819f3ef85e6632f2f7a3bf4f8bb663ff8c" ]
[ "src/spn/gpu/TensorFlow.py" ]
[ "'''\nCreated on March 27, 2018\n\n@author: Alejandro Molina\n'''\n\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.python.client import timeline\n\nfrom spn.algorithms.TransformStructure import Copy\nfrom spn.structure.Base import Product, Sum, eval_spn_bottom_up\nfrom spn.structure.leaves.histogram.Histograms import Histogram\nfrom spn.structure.leaves.histogram.Inference import histogram_likelihood\nfrom spn.structure.leaves.parametric.Parametric import Gaussian\n\n\ndef log_sum_to_tf_graph(node, children, data_placeholder=None, variable_dict=None, log_space=True, dtype=np.float32):\n assert log_space\n with tf.variable_scope(\"%s_%s\" % (node.__class__.__name__, node.id)):\n softmaxInverse = np.log(node.weights / np.max(node.weights)).astype(dtype)\n tfweights = tf.nn.softmax(tf.get_variable(\"weights\", initializer=tf.constant(softmaxInverse)))\n variable_dict[node] = tfweights\n childrenprob = tf.stack(children, axis=1)\n return tf.reduce_logsumexp(childrenprob + tf.log(tfweights), axis=1)\n\n\ndef tf_graph_to_sum(node, tfvar):\n node.weights = tfvar.eval().tolist()\n\n\ndef log_prod_to_tf_graph(node, children, data_placeholder=None, variable_dict=None, log_space=True, dtype=np.float32):\n assert log_space\n with tf.variable_scope(\"%s_%s\" % (node.__class__.__name__, node.id)):\n return tf.add_n(children)\n\n\ndef histogram_to_tf_graph(node, data_placeholder=None, log_space=True, variable_dict=None, dtype=np.float32):\n with tf.variable_scope(\"%s_%s\" % (node.__class__.__name__, node.id)):\n inps = np.arange(int(max(node.breaks))).reshape((-1, 1))\n tmpscope = node.scope[0]\n node.scope[0] = 0\n hll = histogram_likelihood(node, inps)\n node.scope[0] = tmpscope\n if log_space:\n hll = np.log(hll)\n\n lls = tf.constant(hll.astype(dtype))\n\n col = data_placeholder[:, node.scope[0]]\n\n return tf.squeeze(tf.gather(lls, col))\n\n\n_node_log_tf_graph = {Sum: log_sum_to_tf_graph, Product: log_prod_to_tf_graph, Histogram: histogram_to_tf_graph}\n\n\ndef add_node_to_tf_graph(node_type, lambda_func):\n _node_log_tf_graph[node_type] = lambda_func\n\n\n_tf_graph_to_node = {Sum: tf_graph_to_sum}\n\n\ndef add_tf_graph_to_node(node_type, lambda_func):\n _tf_graph_to_node[node_type] = lambda_func\n\n\ndef spn_to_tf_graph(node, data, node_tf_graph=_node_log_tf_graph, log_space=True, dtype=np.float32):\n tf.reset_default_graph()\n # data is a placeholder, with shape same as numpy data\n data_placeholder = tf.placeholder(data.dtype, (None, data.shape[1]))\n variable_dict = {}\n tf_graph = eval_spn_bottom_up(node, node_tf_graph, data_placeholder=data_placeholder, log_space=log_space,\n variable_dict=variable_dict, dtype=dtype)\n return tf_graph, data_placeholder, variable_dict\n\n\ndef tf_graph_to_spn(variable_dict, tf_graph_to_node=_tf_graph_to_node):\n for n, tfvars in variable_dict.items():\n tf_graph_to_node[type(n)](n, tfvars)\n\n\ndef likelihood_loss(tf_graph):\n # minimize negative log likelihood\n return -tf.reduce_sum(tf_graph)\n\n\ndef optimize_tf(spn, data, epochs=1000, optimizer=None):\n spn_copy = Copy(spn)\n tf_graph, data_placeholder, variable_dict = spn_to_tf_graph(spn_copy, data)\n optimize_tf_graph(tf_graph, variable_dict, data_placeholder, data, epochs=epochs, optimizer=optimizer)\n return spn_copy\n\n\ndef optimize_tf_graph(tf_graph, variable_dict, data_placeholder, data, epochs=1000, optimizer=None):\n if optimizer is None:\n optimizer = tf.train.GradientDescentOptimizer(0.001)\n opt_op = optimizer.minimize(-tf.reduce_sum(tf_graph))\n with tf.Session() as sess:\n 
sess.run(tf.global_variables_initializer())\n for _ in range(epochs):\n sess.run(opt_op, feed_dict={data_placeholder: data})\n tf_graph_to_spn(variable_dict)\n\n\ndef eval_tf(spn, data, save_graph_path=None, dtype=np.float32):\n tf_graph, placeholder, _ = spn_to_tf_graph(spn, data, dtype=dtype)\n return eval_tf_graph(tf_graph, placeholder, data, save_graph_path=save_graph_path)\n\n\ndef eval_tf_graph(tf_graph, data_placeholder, data, save_graph_path=None):\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n result = sess.run(tf_graph, feed_dict={data_placeholder: data})\n\n if save_graph_path is not None:\n tf.summary.FileWriter(save_graph_path, sess.graph)\n\n return result.reshape(-1, 1)\n\n\ndef eval_tf_trace(spn, data, log_space=True, save_graph_path=None):\n data_placeholder = tf.placeholder(data.dtype, data.shape)\n import time\n tf_graph = spn_to_tf_graph(spn, data_placeholder, log_space)\n run_metadata = None\n with tf.Session() as sess:\n run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)\n run_metadata = tf.RunMetadata()\n sess.run(tf.global_variables_initializer())\n\n start = time.perf_counter()\n result = sess.run(tf_graph, feed_dict={data_placeholder: data}, options=run_options,\n run_metadata=run_metadata)\n end = time.perf_counter()\n\n e2 = end - start\n\n print(e2)\n\n tl = timeline.Timeline(run_metadata.step_stats)\n ctf = tl.generate_chrome_trace_format()\n\n import json\n traceEvents = json.loads(ctf)[\"traceEvents\"]\n elapsed = max([o[\"ts\"] + o[\"dur\"] for o in traceEvents if \"ts\" in o and \"dur\" in o]) - min(\n [o[\"ts\"] for o in traceEvents if \"ts\" in o])\n return result, elapsed\n\n if save_graph_path is not None:\n summary_fw = tf.summary.FileWriter(save_graph_path, sess.graph)\n if trace:\n summary_fw.add_run_metadata(run_metadata, \"run\")\n\n return result, -1\n" ]
[ [ "numpy.max", "tensorflow.python.client.timeline.Timeline", "tensorflow.summary.FileWriter", "numpy.log", "tensorflow.reset_default_graph", "tensorflow.Session", "tensorflow.add_n", "tensorflow.RunMetadata", "tensorflow.constant", "tensorflow.variable_scope", "tensorflow.log", "tensorflow.placeholder", "tensorflow.reduce_sum", "tensorflow.stack", "tensorflow.gather", "tensorflow.RunOptions", "tensorflow.global_variables_initializer", "tensorflow.train.GradientDescentOptimizer" ] ]
teamdatatonic/tf-sampling
[ "2a86091e5d2b94d416601cfb8becb0bf8d6473ca" ]
[ "sampling/io/schema.py" ]
[ "\"\"\"Defines a Schema class for ingesting structured data.\"\"\"\n\nfrom google.cloud import bigquery, storage\nimport tensorflow as tf\nimport numpy as np\nimport json\nimport collections\n\nfrom ..gcp.api import get_credentials\nfrom ..util.dimensions import ensure_iterable\n\nDTYPE_DICT = {\n 'STRING': tf.string,\n 'INTEGER': tf.int64,\n 'FLOAT': tf.float32,\n 'NUMERIC': tf.float32,\n 'BOOLEAN': tf.bool,\n 'TIMESTAMP': None,\n 'RECORD': None\n}\n\nDEFAULT_DICT = {\n 'STRING': ' ',\n 'INTEGER': 0,\n 'FLOAT': 0.0,\n 'NUMERIC': 0.0,\n 'BOOLEAN': False,\n 'TIMESTAMP': None,\n 'RECORD': None\n}\n\n\nclass Field(bigquery.SchemaField):\n \"\"\"Extends BigQuery SchemaField by binding defaults and tensorflow types.\n\n Note: does not currently handle TIMESTAMP, nested (RECORD) or REPEATED entries.\n \"\"\"\n def __init__(self,\n name,\n field_type,\n mode='NULLABLE',\n description=None,\n fields=()):\n\n self.dtype = _make_tf_dtype(field_type)\n bq_dtype = _make_bq_dtype(self.dtype)\n self.default = DEFAULT_DICT[bq_dtype]\n\n assert mode.upper() in ('NULLABLE','REQUIRED'), \\\n 'unsupported field mode {}'.format(mode.upper())\n\n super().__init__(name, bq_dtype, mode, description, fields)\n\n @classmethod\n def from_schemafield(cls, f):\n return cls(f.name, f.field_type, f.mode, f.description, f.fields)\n\n\nclass Schema(collections.UserList):\n \"\"\"Schema (i.e. list of Fields) for reading tabular data.\n\n Can be built manually with list operations, or read from json.\n\n Args:\n label: Name of field to use as label\n features_to_forward: Name(s) of field(s) to copy for forwarding (so\n that they can also be used in a model). Copied\n features will be named with a trailing underscore,\n so if you need to forward a field called 'user_id'\n then the key for tf.contrib.estimator.forward_features\n should be 'user_id_'.\n \"\"\"\n def __init__(self, arg=None, label=None, features_to_forward=[]):\n if arg is None:\n super().__init__()\n else:\n super().__init__(arg)\n\n self.label = label\n self.features_to_forward = ensure_iterable(features_to_forward)\n\n def __getitem__(self, index):\n if isinstance(index, int):\n return super().__getitem__(index)\n else:\n return self.as_dict[index]\n\n @property\n def names(self):\n return [field.name for field in self]\n\n @property\n def defaults(self):\n return [[tf.constant(field.default, dtype=field.dtype)]\n for field in self]\n\n @property\n def dtypes(self):\n return [field.dtype for field in self]\n\n @property\n def placeholders(self):\n return {\n field.name: tf.placeholder(shape=[None],\n dtype=field.dtype,\n name='placeholder_' + field.name)\n for field in self\n }\n\n @property\n def as_dict(self):\n return {field.name: field for field in self}\n\n @classmethod\n def from_BQ(cls,\n dataset,\n table,\n label=None,\n credentials_json=None,\n features_to_forward=None):\n \"\"\"Gets schema by interrogating table directly.\"\"\"\n\n credentials = get_credentials(credentials_json)\n\n client = bigquery.Client(credentials.project_id,\n credentials=credentials)\n dataset_ref = client.dataset(dataset)\n table_ref = dataset_ref.table(table)\n table = client.get_table(table_ref)\n\n return cls([Field.from_schemafield(f) for f in table.schema],\n label=label,\n features_to_forward=None)\n\n @classmethod\n def from_gcs(cls,\n path,\n label=None,\n credentials_json=None,\n features_to_forward=None):\n \"\"\"Reads json schema from file in Google Cloud Storage.\"\"\"\n\n credentials = get_credentials(credentials_json)\n\n client = 
storage.Client(credentials.project_id,\n credentials=credentials)\n\n bucket_name = path.split('/')[2]\n bucket = client.get_bucket(bucket_name)\n blob = bucket.get_blob(path.split(bucket_name + '/')[1])\n json_string = blob.download_as_string()\n\n return cls.from_json(json_string.decode('utf-8'),\n label=label,\n features_to_forward=features_to_forward)\n\n @classmethod\n def from_json(cls, json_string, label=None, features_to_forward=None):\n \"\"\"Reads schema from json string.\n\n String can be e.g. saved output of::\n\n bq show --schema --format=prettyjson [PROJECT_ID]:[DATASET].[TABLE]\n\n and is expected to be structured something like\n\n .. code-block:: javascript\n\n [\n {'mode': 'NULLABLE',\n 'name': 'customer_id',\n 'type': 'INTEGER',\n 'description': '...description...'\n },\n ...\n ]\n \"\"\"\n\n return cls([\n Field.from_api_repr(f_dict) for f_dict in json.loads(json_string)\n ],\n label=label,\n features_to_forward=features_to_forward)\n\n @classmethod\n def from_dict(cls, schemadict, label=None, features_to_forward=None):\n \"\"\"Makes schema from dictionary (``{name:type, ... }``) \"\"\"\n\n schemalist = []\n for name, field_type in schemadict.items():\n f = Field(name, field_type)\n schemalist.append(f)\n\n return cls(schemalist,\n label=label,\n features_to_forward=features_to_forward)\n\n def to_json(self):\n \"\"\"Writes schema to json string.\n\n See also :meth:`~datatonicml.io.schema.Schema.from_json`)\n \"\"\"\n\n output = [field.to_api_repr() for field in self]\n return json.dumps(output)\n\n def serving_input_receiver_fn(self,\n receive_csv=False,\n csv_includes_label=True):\n \"\"\"Makes a raw serving input receiver for ML Engine prediction.\n\n Returns a class that receives and passes forward a feature Tensor dict.\n\n Args:\n receive_csv: Served model expects csv rows (can batch-input .csv file)\n csv_includes_label: Served model expects (unused) label column in .csv\n \"\"\"\n class FakeLenDict(dict):\n \"\"\"Make input dictionary appear length>1 (hack the export signature)\"\"\"\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def __len__(self):\n return 2\n\n if receive_csv:\n receiver_tensors = FakeLenDict(\n input=tf.placeholder(shape=[None], dtype=tf.string))\n feature_tensors, _ = self.parse_csv(\n receiver_tensors[\"input\"], include_label=csv_includes_label)\n else:\n receiver_tensors = self.placeholders\n feature_tensors = dict([\n (name, (_bool2str(t) if t.dtype == tf.bool else t))\n for name, t in receiver_tensors.items()\n ])\n\n for ff in self.features_to_forward:\n feature_tensors[ff + '_'] = tf.identity(feature_tensors[ff])\n\n return tf.estimator.export.ServingInputReceiver(\n feature_tensors, receiver_tensors)\n\n @property\n def _csv_defaults(self):\n return [(['true'] if d else ['false']) if isinstance(d[0], bool) else d\n for d in self.defaults]\n\n def parse_csv(self, records, include_label=True, **kwargs):\n \"\"\"Makes features and labels from tensor of csv rowstrings.\"\"\"\n\n if include_label:\n tensors = tf.decode_csv(records, self._csv_defaults, **kwargs)\n else:\n defaults_without_label = [\n y for (x, y) in zip(self.names, self._csv_defaults)\n if x != self.label\n ]\n\n tensors = tf.decode_csv(records, defaults_without_label, **kwargs)\n\n features = dict(zip(self.names, tensors))\n\n try:\n labels = features.pop(self.label)\n return features, labels\n except KeyError:\n return features, []\n\n\ndef _make_tf_dtype(field_type):\n \"\"\"Handles various ways of passing type information.\n\n (tf.DType, 
np.dtype, type or string)\n \"\"\"\n\n if isinstance(field_type, tf.DType):\n return field_type\n elif isinstance(field_type, type):\n return tf.as_dtype(np.dtype(field_type))\n elif isinstance(field_type, str):\n try:\n return tf.as_dtype(field_type)\n except TypeError:\n try:\n return tf.as_dtype(np.dtype(field_type))\n except TypeError:\n assert field_type.upper() in ('STRING','INTEGER','FLOAT','NUMERIC','BOOLEAN'), \\\n 'unsupported field type {}'.format(field_type.upper())\n return DTYPE_DICT[field_type]\n else:\n raise TypeError(\n '{} is not a recognized datatype or type name'.format(field_type))\n\n\ndef _make_bq_dtype(tftype):\n \"\"\"Makes schema type string from tf.DType\"\"\"\n\n if tftype is tf.string:\n return 'STRING'\n elif tftype.is_integer:\n return 'INTEGER'\n elif tftype.is_floating:\n return 'FLOAT'\n elif tftype.is_bool:\n return 'BOOLEAN'\n else:\n raise TypeError('unsupported dtype {}'.format(tftype))\n\n\ndef _bool2str(tensor):\n return tf.where(tensor, tf.fill(tf.shape(tensor), 'true'),\n tf.fill(tf.shape(tensor), 'false'))\n" ]
[ [ "tensorflow.shape", "tensorflow.decode_csv", "tensorflow.as_dtype", "tensorflow.estimator.export.ServingInputReceiver", "tensorflow.constant", "tensorflow.placeholder", "tensorflow.identity", "numpy.dtype" ] ]
alexchungio/IMDB-LSTM
[ "d7a5fbc84a24a2fd7b5fb945cf016e92bf7bcd98" ]
[ "train.py" ]
[ "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#------------------------------------------------------\n# @ File : train.py\n# @ Description: \n# @ Author : Alex Chung\n# @ Contact : [email protected]\n# @ License : Copyright (c) 2017-2018\n# @ Time : 2020/9/7 下午2:34\n# @ Software : PyCharm\n#-------------------------------------------------------\n\nimport os\nimport numpy as np\nimport tensorflow.compat.v1 as tf\nfrom tqdm import tqdm\nimport json\nfrom tensorflow.keras.datasets import imdb\nfrom tensorflow.keras import preprocessing\nfrom tensorflow.keras.preprocessing.text import Tokenizer\n\nfrom libs.configs import cfgs\nfrom data.dataset_pipeline import dataset_batch\nfrom libs.nets.model import LSTM\n\nimdb_dir = './data/aclImdb'\nglove_dir = './data/glove.6B'\ntrain_dir = os.path.join(imdb_dir, 'train')\n\ndef main(argv):\n\n # -------------------load dataset-------------------------------------------\n data_text = []\n data_label = []\n for index, label_type in enumerate(['neg', 'pos']):\n type_dir = os.path.join(train_dir, label_type)\n for filename in os.listdir(type_dir):\n if filename.split('.')[-1] == 'txt':\n with open(os.path.join(type_dir, filename)) as f:\n data_text.append(f.read())\n data_label.append(index)\n\n tokenizer = Tokenizer(num_words=cfgs.FEATURE_SIZE)\n tokenizer.fit_on_texts(data_text)\n # save word index\n word_index = tokenizer.word_index\n with open(cfgs.WORD_INDEX, 'w') as f:\n f.write(json.dumps(word_index))\n print('Found uique token {0}'.format(len(word_index)))\n sequence = tokenizer.texts_to_sequences(data_text)\n # pad squeence\n # pad_sequence = preprocessing.sequence.pad_sequences(sequence, max_length)\n # max_index = max([max(seq) for seq in data])\n #\n # (x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=cfgs.FEATURE_SIZE)\n # word_index = imdb.get_word_index()\n # word_index = dict(sorted(word_index.items(), key=lambda kv: (kv[1], kv[0]))) # sort word index\n # use train dataset to train and validation model\n x_dataset = sequence\n y_dataset = data_label\n # turn the samples into 2D tensor of shape (num_samples, max_length)\n x_dataset = preprocessing.sequence.pad_sequences(x_dataset, cfgs.MAX_LENGTH)\n y_dataset = np.asarray(y_dataset).astype(np.float32)\n # x_test = preprocessing.sequence.pad_sequences(x_test, cfgs.MAX_LENGTH)\n # y_train = np.asarray(y_train).astype(np.float32)\n\n num_val_samples = int(np.floor(len(x_dataset) * cfgs.SPLIT_RATIO))\n num_train_samples = len(x_dataset) - num_val_samples\n\n # shuffle dataset\n indices = np.arange(len(x_dataset))\n np.random.shuffle(indices)\n x_dataset = x_dataset[indices]\n y_dataset = y_dataset[indices]\n\n # split dataset\n x_train, y_train = x_dataset[:num_train_samples], y_dataset[:num_train_samples]\n x_val, y_val = x_dataset[num_train_samples:], y_dataset[num_train_samples:]\n\n # --------------------- construct model------------------------------------------\n model = LSTM(input_length=cfgs.MAX_LENGTH, feature_size=cfgs.FEATURE_SIZE, embedding_size= cfgs.EMBEDDING_SIZE,\n num_layers=cfgs.NUM_LAYERS, num_units=cfgs.NUM_UNITS)\n\n saver = tf.train.Saver(max_to_keep=30)\n\n # get computer graph\n graph = tf.get_default_graph()\n\n write = tf.summary.FileWriter(logdir=cfgs.SUMMARY_PATH, graph=graph)\n\n # os.environ[\"CUDA_VISIBLE_DEVICES\"] = '0'\n config = tf.ConfigProto()\n # config.gpu_options.per_process_gpu_memory_fraction = 0.5 # maximun alloc gpu50% of MEM\n config.gpu_options.allow_growth = True\n\n init_op = tf.group(\n tf.global_variables_initializer(),\n 
tf.local_variables_initializer()\n )\n # train and save model\n with tf.Session(config=config) as sess:\n sess.run(init_op)\n\n # get model variable of network\n model_variable = tf.model_variables()\n for var in model_variable:\n print(var.op.name, var.shape)\n\n #------------------load embedding pretrained weights---------------------\n # parse glove pretrained model\n if cfgs.EMBEDDING_TRANSFER:\n embedding_index = {}\n with open(os.path.join(glove_dir, 'glove.6B.100d.txt')) as f:\n for line in f:\n value = line.split()\n word = value[0]\n coeff = value[1:]\n embedding_index[word] = coeff\n\n embedding_matrix = np.zeros(shape=(cfgs.FEATURE_SIZE, cfgs.EMBEDDING_SIZE))\n for word, index in word_index.items():\n if index < cfgs.FEATURE_SIZE:\n embedding_vector = embedding_index.get(word)\n if embedding_vector is not None:\n embedding_matrix[index] = embedding_vector\n\n embedding_variable = tf.global_variables(scope='embedding')\n tf.assign(embedding_variable[0], tf.convert_to_tensor(embedding_matrix, dtype=tf.float32))\n print('+++++++++++++++++++++Successful load glove embedding weights+++++++++++++++++++++++')\n # -----------------------train part------------------------------------------------\n # merges all summaries collected in the default graph\n summary_op = tf.summary.merge_all()\n\n train_step_per_epoch = num_train_samples // cfgs.BATCH_SIZE\n test_step_pre_epoch = num_val_samples // cfgs.BATCH_SIZE\n\n # generate batch\n train_dataset = dataset_batch(x_train, y_train, batch_size=cfgs.BATCH_SIZE, is_training=True)\n val_dataset = dataset_batch(x_val, y_val, batch_size=cfgs.BATCH_SIZE, is_training=False)\n train_data_batch, train_label_batch = train_dataset.get_next()\n val_data_batch, val_label_batch = val_dataset.get_next()\n # use k folder validation\n for epoch in range(cfgs.NUM_EPOCH):\n train_bar = tqdm(range(1, train_step_per_epoch+1))\n for step in train_bar:\n x_train, y_train = sess.run([train_data_batch, train_label_batch])\n y_train = y_train[:, np.newaxis]\n feed_dict = model.fill_feed_dict(x_train, y_train, keep_prob=cfgs.KEEP_PROB)\n summary, global_step, train_loss, train_acc, _ = sess.run([summary_op, model.global_step, model.loss, model.acc, model.train],\n feed_dict=feed_dict)\n if step % cfgs.SMRY_ITER == 0:\n write.add_summary(summary=summary, global_step=global_step)\n write.flush()\n\n train_bar.set_description(\"Epoch {0} : Step {1} => Train Loss: {2:.4f} | Train ACC: {3:.4f}\".\n format(epoch, step, train_loss, train_acc))\n test_loss_list = []\n test_acc_list = []\n for step in range(test_step_pre_epoch):\n x_test, y_test = sess.run([val_data_batch, val_label_batch])\n y_test = y_test[:, np.newaxis]\n feed_dict = model.fill_feed_dict(x_test, y_test, keep_prob=1.0)\n\n test_loss, test_acc, _ = sess.run([model.loss, model.acc, model.train], feed_dict=feed_dict)\n test_loss_list.append(test_loss)\n test_acc_list.append(test_acc)\n test_loss = sum(test_loss_list) / len(test_loss_list)\n test_acc = sum(test_acc_list) / len(test_acc_list)\n print(\"Epoch {0} : Step {1} => Val Loss: {2:.4f} | Val ACC: {3:.4f} \".format(epoch, step,\n test_loss, test_acc))\n ckpt_file = os.path.join(cfgs.TRAINED_CKPT, 'model_loss={0:4f}.ckpt'.format(test_loss))\n saver.save(sess=sess, save_path=ckpt_file, global_step=global_step)\n sess.close()\n print('model training has complete')\n\n\n\nif __name__ == \"__main__\":\n\n tf.app.run()" ]
[ [ "tensorflow.keras.preprocessing.sequence.pad_sequences", "tensorflow.compat.v1.model_variables", "tensorflow.compat.v1.summary.FileWriter", "tensorflow.compat.v1.global_variables_initializer", "tensorflow.compat.v1.summary.merge_all", "numpy.asarray", "numpy.zeros", "tensorflow.compat.v1.train.Saver", "tensorflow.compat.v1.get_default_graph", "tensorflow.compat.v1.ConfigProto", "tensorflow.compat.v1.global_variables", "tensorflow.compat.v1.local_variables_initializer", "numpy.random.shuffle", "tensorflow.compat.v1.Session", "tensorflow.compat.v1.convert_to_tensor", "tensorflow.keras.preprocessing.text.Tokenizer", "tensorflow.compat.v1.app.run" ] ]
clairvoyant/GamestonkTerminal
[ "7b40cfe61b32782e36f5de8a08d075532a08c294" ]
[ "gamestonk_terminal/stocks/backtesting/bt_view.py" ]
[ "\"\"\"bt view module\"\"\"\n__docformat__ = \"numpy\"\n\nimport os\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nfrom pandas.plotting import register_matplotlib_converters\n\nfrom gamestonk_terminal import feature_flags as gtff\nfrom gamestonk_terminal.config_plot import PLOT_DPI\nfrom gamestonk_terminal.helper_funcs import export_data, plot_autoscale\nfrom gamestonk_terminal.stocks.backtesting import bt_model\n\nregister_matplotlib_converters()\n\nnp.seterr(divide=\"ignore\")\n\n\ndef display_simple_ema(\n ticker: str,\n df_stock: pd.DataFrame,\n ema_length: int,\n spy_bt: bool = True,\n no_bench: bool = False,\n export: str = \"\",\n):\n \"\"\"Strategy where stock is bought when Price > EMA(l)\n\n Parameters\n ----------\n ticker : str\n Stock ticker\n df_stock : pd.Dataframe\n Dataframe of prices\n ema_length : int\n Length of ema window\n spy_bt : bool\n Boolean to add spy comparison\n no_bench : bool\n Boolean to not show buy and hold comparison\n export : bool\n Format to export backtest results\n \"\"\"\n res = bt_model.ema_strategy(ticker, df_stock, ema_length, spy_bt, no_bench)\n fig, ax = plt.subplots(figsize=plot_autoscale(), dpi=PLOT_DPI)\n res.plot(title=f\"Equity for EMA({ema_length})\", ax=ax)\n ax.grid(b=True, which=\"major\", color=\"#666666\", linestyle=\"-\")\n ax.set_xlim([df_stock.index[0], df_stock.index[-1]])\n fig.tight_layout()\n if gtff.USE_ION:\n plt.ion()\n plt.show()\n print(res.display(), \"\\n\")\n export_data(\n export, os.path.dirname(os.path.abspath(__file__)), \"simple_ema\", res.stats\n )\n\n\ndef display_ema_cross(\n ticker: str,\n df_stock: pd.DataFrame,\n short_ema: int,\n long_ema: int,\n spy_bt: bool = True,\n no_bench: bool = False,\n shortable: bool = True,\n export: str = \"\",\n):\n \"\"\"Strategy where we go long/short when EMA(short) is greater than/less than EMA(short)\n\n Parameters\n ----------\n ticker : str\n Stock ticker\n df_stock : pd.Dataframe\n Dataframe of prices\n short_ema : int\n Length of short ema window\n long_ema : int\n Length of long ema window\n spy_bt : bool\n Boolean to add spy comparison\n no_bench : bool\n Boolean to not show buy and hold comparison\n shortable : bool\n Boolean to allow for selling of the stock at cross\n export : str\n Format to export data\n \"\"\"\n res = bt_model.ema_cross_strategy(\n ticker, df_stock, short_ema, long_ema, spy_bt, no_bench, shortable\n )\n fig, ax = plt.subplots(figsize=plot_autoscale(), dpi=PLOT_DPI)\n res.plot(title=f\"EMA Cross for EMA({short_ema})/EMA({long_ema})\", ax=ax)\n ax.grid(b=True, which=\"major\", color=\"#666666\", linestyle=\"-\")\n ax.set_xlim([df_stock.index[0], df_stock.index[-1]])\n fig.tight_layout()\n if gtff.USE_ION:\n plt.ion()\n plt.show()\n print(res.display(), \"\\n\")\n export_data(\n export, os.path.dirname(os.path.abspath(__file__)), \"ema_cross\", res.stats\n )\n\n\n# pylint:disable=too-many-arguments\ndef display_rsi_strategy(\n ticker: str,\n df_stock: pd.DataFrame,\n periods: int,\n low_rsi: int,\n high_rsi: int,\n spy_bt: bool = True,\n no_bench: bool = False,\n shortable: bool = True,\n export: str = \"\",\n):\n \"\"\"Strategy that buys when the stock is less than a threshold and shorts when it exceeds a threshold.\n\n Parameters\n ----------\n ticker : str\n Stock ticker\n df_stock : pd.Dataframe\n Dataframe of prices\n periods : int\n Number of periods for RSI calculati\n low_rsi : int\n Low RSI value to buy\n hirh_rsi : int\n High RSI value to sell\n spy_bt : bool\n Boolean to add spy comparison\n no_bench : 
bool\n        Boolean to not show buy and hold comparison\n    shortable : bool\n        Boolean to allow for selling of the stock at cross\n    export : str\n        Format to export backtest results\n    \"\"\"\n    res = bt_model.rsi_strategy(\n        ticker, df_stock, periods, low_rsi, high_rsi, spy_bt, no_bench, shortable\n    )\n    fig, ax = plt.subplots(figsize=plot_autoscale(), dpi=PLOT_DPI)\n    res.plot(title=f\"RSI Strategy between ({low_rsi}, {high_rsi})\", ax=ax)\n    ax.grid(b=True, which=\"major\", color=\"#666666\", linestyle=\"-\")\n    ax.set_xlim([df_stock.index[0], df_stock.index[-1]])\n    fig.tight_layout()\n    if gtff.USE_ION:\n        plt.ion()\n    plt.show()\n    print(res.display(), \"\\n\")\n    export_data(\n        export, os.path.dirname(os.path.abspath(__file__)), \"rsi_cross\", res.stats\n    )\n" ]
[ [ "matplotlib.pyplot.show", "matplotlib.pyplot.ion", "numpy.seterr", "pandas.plotting.register_matplotlib_converters" ] ]
mschart/iblapps
[ "f7e36ddc102cb003d81e0c75d7871bfc666d7f32" ]
[ "atlaselectrophysiology/plot_data.py" ]
[ "from matplotlib import cm\r\nfrom pathlib import Path\r\nimport numpy as np\r\nimport alf.io\r\nfrom brainbox.processing import bincount2D\r\nfrom brainbox.population import xcorr\r\nimport scipy\r\nfrom PyQt5 import QtGui\r\n\r\nN_BNK = 4\r\nBNK_SIZE = 10\r\nAUTOCORR_BIN_SIZE = 0.25 / 1000\r\nAUTOCORR_WIN_SIZE = 10 / 1000\r\nFS = 30000\r\nnp.seterr(divide='ignore', invalid='ignore')\r\n\r\n\r\nclass PlotData:\r\n def __init__(self, alf_path, ephys_path):\r\n self.alf_path = alf_path\r\n self.ephys_path = ephys_path\r\n\r\n self.chn_coords = np.load(Path(self.alf_path, 'channels.localCoordinates.npy'))\r\n self.chn_ind = np.load(Path(self.alf_path, 'channels.rawInd.npy'))\r\n # See if spike data is available\r\n try:\r\n self.spikes = alf.io.load_object(self.alf_path, 'spikes')\r\n self.spike_data_status = True\r\n except Exception:\r\n print('spike data was not found, some plots will not display')\r\n self.spike_data_status = False\r\n\r\n try:\r\n self.clusters = alf.io.load_object(self.alf_path, 'clusters')\r\n self.filter_units('all')\r\n self.cluster_data_status = True\r\n self.compute_timescales()\r\n except Exception:\r\n print('cluster data was not found, some plots will not display')\r\n self.cluster_data_status = False\r\n\r\n try:\r\n lfp_spectrum = alf.io.load_object(self.ephys_path, 'ephysSpectralDensityLF',\r\n namespace='iblqc')\r\n if len(lfp_spectrum) == 2:\r\n self.lfp_freq = lfp_spectrum.get('freqs')\r\n self.lfp_power = lfp_spectrum.get('power', [])\r\n if not np.any(self.lfp_power):\r\n self.lfp_power = lfp_spectrum.get('amps')\r\n self.lfp_data_status = True\r\n else:\r\n print('lfp data was not found, some plots will not display')\r\n self.lfp_data_status = False\r\n except Exception:\r\n print('lfp data was not found, some plots will not display')\r\n self.lfp_data_status = False\r\n\r\n def filter_units(self, type):\r\n if type == 'all':\r\n self.spike_idx = np.arange(self.spikes['clusters'].size)\r\n self.kp_idx = np.where(~np.isnan(self.spikes['depths'][self.spike_idx]))[0]\r\n else:\r\n clust = np.where(self.clusters.metrics.ks2_label == type)\r\n self.spike_idx = np.where(np.isin(self.spikes['clusters'], clust))[0]\r\n self.kp_idx = np.where(~np.isnan(self.spikes['depths'][self.spike_idx]))[0]\r\n\r\n# Plots that require spike and cluster data\r\n def get_depth_data_scatter(self):\r\n if not self.spike_data_status:\r\n data_scatter = None\r\n return data_scatter\r\n else:\r\n A_BIN = 10\r\n amp_range = np.quantile(self.spikes['amps'][self.spike_idx], [0, 0.9])\r\n amp_bins = np.linspace(amp_range[0], amp_range[1], A_BIN)\r\n colour_bin = np.linspace(0.0, 1.0, A_BIN)\r\n colours = (cm.get_cmap('BuPu')(colour_bin)[np.newaxis, :, :3][0]) * 255\r\n spikes_colours = np.empty(self.spikes['amps'][self.spike_idx].size, dtype=object)\r\n spikes_size = np.empty(self.spikes['amps'][self.spike_idx].size)\r\n for iA in range(amp_bins.size - 1):\r\n idx = np.where((self.spikes['amps'][self.spike_idx] > amp_bins[iA]) &\r\n (self.spikes['amps'][self.spike_idx] <= amp_bins[iA + 1]))[0]\r\n\r\n spikes_colours[idx] = QtGui.QColor(*colours[iA])\r\n spikes_size[idx] = iA / (A_BIN / 4)\r\n\r\n data_scatter = {\r\n 'x': self.spikes['times'][self.spike_idx][0:-1:100],\r\n 'y': self.spikes['depths'][self.spike_idx][0:-1:100],\r\n 'levels': amp_range * 1e6,\r\n 'colours': spikes_colours[0:-1:100],\r\n 'pen': None,\r\n 'size': spikes_size[0:-1:100],\r\n 'symbol': np.array('o'),\r\n 'xrange': np.array([np.min(self.spikes['times'][self.spike_idx][0:-1:100]),\r\n 
np.max(self.spikes['times'][self.spike_idx][0:-1:100])]),\r\n 'xaxis': 'Time (s)',\r\n 'title': 'Amplitude (uV)',\r\n 'cmap': 'BuPu',\r\n 'cluster': False\r\n }\r\n\r\n return data_scatter\r\n\r\n def get_fr_p2t_data_scatter(self):\r\n if not self.spike_data_status:\r\n data_fr_scatter = None\r\n data_p2t_scatter = None\r\n data_amp_scatter = None\r\n return data_fr_scatter, data_p2t_scatter, data_amp_scatter\r\n else:\r\n (clu,\r\n spike_depths,\r\n spike_amps,\r\n n_spikes) = self.compute_spike_average(self.spikes['clusters'][self.spike_idx],\r\n self.spikes['depths'][self.spike_idx],\r\n self.spikes['amps'][self.spike_idx])\r\n spike_amps = spike_amps * 1e6\r\n fr = n_spikes / np.max(self.spikes['times'])\r\n fr_norm, fr_levels = self.normalise_data(fr, lquant=0, uquant=1)\r\n\r\n data_fr_scatter = {\r\n 'x': spike_amps,\r\n 'y': spike_depths,\r\n 'colours': fr_norm,\r\n 'pen': 'k',\r\n 'size': np.array(8),\r\n 'symbol': np.array('o'),\r\n 'levels': fr_levels,\r\n 'xrange': np.array([0.9 * np.min(spike_amps),\r\n 1.1 * np.max(spike_amps)]),\r\n 'xaxis': 'Amplitude (uV)',\r\n 'title': 'Firing Rate (Sp/s)',\r\n 'cmap': 'hot',\r\n 'cluster': True\r\n }\r\n\r\n p2t = self.clusters['peakToTrough'][clu]\r\n p2t_norm, p2t_levels = self.normalise_data(p2t, lquant=0, uquant=1)\r\n\r\n # Define the p2t levels so always same colourbar across sessions\r\n p2t_levels = [-1.5, 1.5]\r\n data_p2t_scatter = {\r\n 'x': spike_amps,\r\n 'y': spike_depths,\r\n\r\n 'colours': p2t_norm,\r\n 'pen': 'k',\r\n 'size': np.array(8),\r\n 'symbol': np.array('o'),\r\n 'levels': p2t_levels,\r\n 'xrange': np.array([0.9 * np.min(spike_amps),\r\n 1.1 * np.max(spike_amps)]),\r\n 'xaxis': 'Amplitude (uV)',\r\n 'title': 'Peak to Trough duration (ms)',\r\n 'cmap': 'RdYlGn',\r\n 'cluster': True\r\n }\r\n\r\n spike_amps_norm, spike_amps_levels = self.normalise_data(spike_amps, lquant=0,\r\n uquant=1)\r\n\r\n data_amp_scatter = {\r\n 'x': fr,\r\n 'y': spike_depths,\r\n\r\n 'colours': spike_amps_norm,\r\n 'pen': 'k',\r\n 'size': np.array(8),\r\n 'symbol': np.array('o'),\r\n 'levels': spike_amps_levels,\r\n 'xrange': np.array([0.9 * np.min(fr),\r\n 1.1 * np.max(fr)]),\r\n 'xaxis': 'Firing Rate (Sp/s)',\r\n 'title': 'Amplitude (uV)',\r\n 'cmap': 'magma',\r\n 'cluster': True\r\n }\r\n\r\n return data_fr_scatter, data_p2t_scatter, data_amp_scatter\r\n\r\n def get_fr_img(self):\r\n if not self.spike_data_status:\r\n data_img = None\r\n return data_img\r\n else:\r\n T_BIN = 0.05\r\n D_BIN = 5\r\n n, times, depths = bincount2D(self.spikes['times'][self.spike_idx][self.kp_idx],\r\n self.spikes['depths'][self.spike_idx][self.kp_idx],\r\n T_BIN, D_BIN, ylim=[0, np.max(self.chn_coords[:, 1])])\r\n img = n.T / T_BIN\r\n xscale = (times[-1] - times[0]) / img.shape[0]\r\n yscale = (depths[-1] - depths[0]) / img.shape[1]\r\n\r\n data_img = {\r\n 'img': img,\r\n 'scale': np.array([xscale, yscale]),\r\n 'levels': np.quantile(np.mean(img, axis=0), [0, 1]),\r\n 'xrange': np.array([times[0], times[-1]]),\r\n 'xaxis': 'Time (s)',\r\n 'cmap': 'binary',\r\n 'title': 'Firing Rate'\r\n }\r\n\r\n return data_img\r\n\r\n def get_fr_amp_data_line(self):\r\n if not self.spike_data_status:\r\n data_fr_line = None\r\n data_amp_line = None\r\n return data_fr_line, data_amp_line\r\n else:\r\n T_BIN = np.max(self.spikes['times'])\r\n D_BIN = 10\r\n nspikes, times, depths = bincount2D(self.spikes['times'][self.spike_idx][self.kp_idx],\r\n self.spikes['depths'][self.spike_idx][self.kp_idx],\r\n T_BIN, D_BIN,\r\n ylim=[0, np.max(self.chn_coords[:, 1])])\r\n\r\n amp, 
times, depths = bincount2D(self.spikes['amps'][self.spike_idx][self.kp_idx],\r\n                                        self.spikes['depths'][self.spike_idx][self.kp_idx],\r\n                                        T_BIN, D_BIN, ylim=[0, np.max(self.chn_coords[:, 1])],\r\n                                        weights=self.spikes['amps'][self.spike_idx]\r\n                                        [self.kp_idx])\r\n            mean_fr = nspikes[:, 0] / T_BIN\r\n            mean_amp = np.divide(amp[:, 0], nspikes[:, 0]) * 1e6\r\n            mean_amp[np.isnan(mean_amp)] = 0\r\n            remove_bins = np.where(nspikes[:, 0] < 50)[0]\r\n            mean_amp[remove_bins] = 0\r\n\r\n            data_fr_line = {\r\n                'x': mean_fr,\r\n                'y': depths,\r\n                'xrange': np.array([0, np.max(mean_fr)]),\r\n                'xaxis': 'Firing Rate (Sp/s)'\r\n            }\r\n\r\n            data_amp_line = {\r\n                'x': mean_amp,\r\n                'y': depths,\r\n                'xrange': np.array([0, np.max(mean_amp)]),\r\n                'xaxis': 'Amplitude (uV)'\r\n            }\r\n\r\n            return data_fr_line, data_amp_line\r\n\r\n    def get_correlation_data_img(self):\r\n        if not self.spike_data_status:\r\n            data_img = None\r\n            return data_img\r\n        else:\r\n            T_BIN = 0.05\r\n            D_BIN = 40\r\n            R, times, depths = bincount2D(self.spikes['times'][self.spike_idx][self.kp_idx],\r\n                                          self.spikes['depths'][self.spike_idx][self.kp_idx],\r\n                                          T_BIN, D_BIN, ylim=[0, np.max(self.chn_coords[:, 1])])\r\n            corr = np.corrcoef(R)\r\n            corr[np.isnan(corr)] = 0\r\n            scale = (np.max(depths) - np.min(depths)) / corr.shape[0]\r\n            data_img = {\r\n                'img': corr,\r\n                'scale': np.array([scale, scale]),\r\n                'levels': np.array([np.min(corr), np.max(corr)]),\r\n                'xrange': np.array([np.min(self.chn_coords[:, 1]), np.max(self.chn_coords[:, 1])]),\r\n                'cmap': 'viridis',\r\n                'title': 'Correlation',\r\n                'xaxis': 'Distance from probe tip (um)'\r\n            }\r\n            return data_img\r\n\r\n    def get_rms_data_img_probe(self, format):\r\n        # Finds channels that are at equivalent depth on probe and averages rms values for each\r\n        # time point at same depth together\r\n        try:\r\n            rms_amps = alf.io.load_file_content(Path(self.ephys_path, '_iblqc_ephysTimeRms' +\r\n                                                     format + '.rms.npy'))\r\n        except Exception:\r\n            try:\r\n                rms_amps = alf.io.load_file_content(Path(self.ephys_path, '_iblqc_ephysTimeRms' +\r\n                                                         format + '.amps.npy'))\r\n            except Exception:\r\n                print('rms data was not found, some plots will not display')\r\n                data_img = None\r\n                data_probe = None\r\n                return data_img, data_probe\r\n\r\n        try:\r\n            rms_times = alf.io.load_file_content(Path(self.ephys_path, '_iblqc_ephysTimeRms' +\r\n                                                      format + '.timestamps.npy'))\r\n            xaxis = 'Time (s)'\r\n        except Exception:\r\n            rms_times = np.array([0, rms_amps.shape[0]])\r\n            xaxis = 'Time samples'\r\n\r\n        # Img data\r\n        _rms = np.take(rms_amps, self.chn_ind, axis=1)\r\n        _, self.chn_depth, chn_count = np.unique(self.chn_coords[:, 1], return_index=True,\r\n                                                 return_counts=True)\r\n        self.chn_depth_eq = np.copy(self.chn_depth)\r\n        self.chn_depth_eq[np.where(chn_count == 2)] += 1\r\n\r\n        def avg_chn_depth(a):\r\n            return(np.mean([a[self.chn_depth], a[self.chn_depth_eq]], axis=0))\r\n\r\n        def get_median(a):\r\n            return(np.median(a))\r\n\r\n        def median_subtract(a):\r\n            return(a - np.median(a))\r\n        img = np.apply_along_axis(avg_chn_depth, 1, _rms * 1e6)\r\n        median = np.mean(np.apply_along_axis(get_median, 1, img))\r\n        # Median subtract to remove bands, but add back the average median so values make sense\r\n        img = np.apply_along_axis(median_subtract, 1, img) + median\r\n        levels = np.quantile(img, [0.1, 0.9])\r\n        xscale = (rms_times[-1] - rms_times[0]) / img.shape[0]\r\n        yscale = (np.max(self.chn_coords[:, 1]) - np.min(self.chn_coords[:, 1])) / img.shape[1]\r\n\r\n        if format == 'AP':\r\n            cmap = 'plasma'\r\n        else:\r\n            cmap = 'inferno'\r\n\r\n        data_img = {\r\n            'img': 
img,\r\n 'scale': np.array([xscale, yscale]),\r\n 'levels': levels,\r\n 'cmap': cmap,\r\n 'xrange': np.array([rms_times[0], rms_times[-1]]),\r\n 'xaxis': xaxis,\r\n 'title': format + ' RMS (uV)'\r\n }\r\n\r\n # Probe data\r\n rms_avg = (np.mean(rms_amps, axis=0)[self.chn_ind]) * 1e6\r\n probe_levels = np.quantile(rms_avg, [0.1, 0.9])\r\n probe_img, probe_scale, probe_offset = self.arrange_channels2banks(rms_avg)\r\n\r\n data_probe = {\r\n 'img': probe_img,\r\n 'scale': probe_scale,\r\n 'offset': probe_offset,\r\n 'level': probe_levels,\r\n 'cmap': cmap,\r\n 'xrange': np.array([0 * BNK_SIZE, (N_BNK) * BNK_SIZE]),\r\n 'title': format + ' RMS (uV)'\r\n }\r\n\r\n return data_img, data_probe\r\n\r\n def get_lfp_spectrum_data(self):\r\n freq_bands = np.vstack(([0, 4], [4, 10], [10, 30], [30, 80], [80, 200]))\r\n data_probe = {}\r\n if not self.lfp_data_status:\r\n data_img = None\r\n for freq in freq_bands:\r\n lfp_band_data = {f\"{freq[0]} - {freq[1]} Hz\": None}\r\n data_probe.update(lfp_band_data)\r\n\r\n return data_img, data_probe\r\n else:\r\n # Power spectrum image\r\n freq_range = [0, 300]\r\n freq_idx = np.where((self.lfp_freq >= freq_range[0]) &\r\n (self.lfp_freq < freq_range[1]))[0]\r\n _lfp = np.take(self.lfp_power[freq_idx], self.chn_ind, axis=1)\r\n _lfp_dB = 10 * np.log10(_lfp)\r\n _, self.chn_depth, chn_count = np.unique(self.chn_coords[:, 1], return_index=True,\r\n return_counts=True)\r\n self.chn_depth_eq = np.copy(self.chn_depth)\r\n self.chn_depth_eq[np.where(chn_count == 2)] += 1\r\n\r\n def avg_chn_depth(a):\r\n return(np.mean([a[self.chn_depth], a[self.chn_depth_eq]], axis=0))\r\n\r\n img = np.apply_along_axis(avg_chn_depth, 1, _lfp_dB)\r\n levels = np.quantile(img, [0.1, 0.9])\r\n xscale = (freq_range[-1] - freq_range[0]) / img.shape[0]\r\n yscale = (np.max(self.chn_coords[:, 1]) - np.min(self.chn_coords[:, 1])) / img.shape[1]\r\n\r\n data_img = {\r\n 'img': img,\r\n 'scale': np.array([xscale, yscale]),\r\n 'levels': levels,\r\n 'cmap': 'viridis',\r\n 'xrange': np.array([freq_range[0], freq_range[-1]]),\r\n 'xaxis': 'Frequency (Hz)',\r\n 'title': 'PSD (dB)'\r\n }\r\n\r\n # Power spectrum in bands on probe\r\n for freq in freq_bands:\r\n freq_idx = np.where((self.lfp_freq >= freq[0]) & (self.lfp_freq < freq[1]))[0]\r\n lfp_avg = np.mean(self.lfp_power[freq_idx], axis=0)[self.chn_ind]\r\n lfp_avg_dB = 10 * np.log10(lfp_avg)\r\n probe_img, probe_scale, probe_offset = self.arrange_channels2banks(lfp_avg_dB)\r\n probe_levels = np.quantile(lfp_avg_dB, [0.1, 0.9])\r\n\r\n lfp_band_data = {f\"{freq[0]} - {freq[1]} Hz\": {\r\n 'img': probe_img,\r\n 'scale': probe_scale,\r\n 'offset': probe_offset,\r\n 'level': probe_levels,\r\n 'cmap': 'viridis',\r\n 'xaxis': 'Time (s)',\r\n 'xrange': np.array([0 * BNK_SIZE, (N_BNK) * BNK_SIZE]),\r\n 'title': f\"{freq[0]} - {freq[1]} Hz (dB)\"}\r\n }\r\n data_probe.update(lfp_band_data)\r\n\r\n return data_img, data_probe\r\n\r\n def get_autocorr(self, clust_idx):\r\n idx = np.where(self.spikes['clusters'] == self.clust_id[clust_idx])[0]\r\n autocorr = xcorr(self.spikes['times'][idx], self.spikes['clusters'][idx],\r\n AUTOCORR_BIN_SIZE, AUTOCORR_WIN_SIZE)\r\n\r\n return autocorr[0, 0, :]\r\n\r\n def get_template_wf(self, clust_idx):\r\n template_wf = (self.clusters['waveforms'][self.clust_id[clust_idx], :, 0])\r\n return template_wf * 1e6\r\n\r\n def arrange_channels2banks(self, data):\r\n Y_OFFSET = 20\r\n bnk_data = []\r\n bnk_scale = np.empty((N_BNK, 2))\r\n bnk_offset = np.empty((N_BNK, 2))\r\n for iX, x in 
enumerate(np.unique(self.chn_coords[:, 0])):\r\n bnk_idx = np.where(self.chn_coords[:, 0] == x)[0]\r\n bnk_vals = data[bnk_idx]\r\n _bnk_data = np.reshape(bnk_vals, (bnk_vals.size, 1)).T\r\n _bnk_yscale = ((np.max(self.chn_coords[bnk_idx, 1]) -\r\n np.min(self.chn_coords[bnk_idx, 1])) / _bnk_data.shape[1])\r\n _bnk_xscale = BNK_SIZE / _bnk_data.shape[0]\r\n _bnk_yoffset = np.min(self.chn_coords[bnk_idx, 1]) - Y_OFFSET\r\n _bnk_xoffset = BNK_SIZE * iX\r\n\r\n bnk_data.append(_bnk_data)\r\n bnk_scale[iX, :] = np.array([_bnk_xscale, _bnk_yscale])\r\n bnk_offset[iX, :] = np.array([_bnk_xoffset, _bnk_yoffset])\r\n\r\n return bnk_data, bnk_scale, bnk_offset\r\n\r\n def compute_spike_average(self, spike_clusters, spike_depth, spike_amp):\r\n clust, inverse, counts = np.unique(spike_clusters, return_inverse=True, return_counts=True)\r\n _spike_depth = scipy.sparse.csr_matrix((spike_depth, (inverse,\r\n np.zeros(inverse.size, dtype=int))))\r\n _spike_amp = scipy.sparse.csr_matrix((spike_amp, (inverse,\r\n np.zeros(inverse.size, dtype=int))))\r\n spike_depth_avg = np.ravel(_spike_depth.toarray()) / counts\r\n spike_amp_avg = np.ravel(_spike_amp.toarray()) / counts\r\n self.clust_id = clust\r\n return clust, spike_depth_avg, spike_amp_avg, counts\r\n\r\n def compute_timescales(self):\r\n self.t_autocorr = 1e3 * np.arange((AUTOCORR_WIN_SIZE / 2) - AUTOCORR_WIN_SIZE,\r\n (AUTOCORR_WIN_SIZE / 2) + AUTOCORR_BIN_SIZE,\r\n AUTOCORR_BIN_SIZE)\r\n n_template = self.clusters['waveforms'][0, :, 0].size\r\n self.t_template = 1e3 * (np.arange(n_template)) / FS\r\n\r\n def normalise_data(self, data, lquant=0, uquant=1):\r\n levels = np.quantile(data, [lquant, uquant])\r\n if np.min(data) < 0:\r\n data = data + np.abs(np.min(data))\r\n norm_data = data / np.max(data)\r\n norm_levels = np.quantile(norm_data, [lquant, uquant])\r\n norm_data[np.where(norm_data < norm_levels[0])] = 0\r\n norm_data[np.where(norm_data > norm_levels[1])] = 1\r\n\r\n return norm_data, levels\r\n" ]
[ [ "numpy.quantile", "numpy.median", "numpy.copy", "numpy.min", "numpy.mean", "numpy.where", "numpy.apply_along_axis", "numpy.max", "numpy.divide", "numpy.empty", "numpy.seterr", "numpy.take", "numpy.arange", "numpy.log10", "numpy.vstack", "numpy.array", "numpy.reshape", "numpy.zeros", "matplotlib.cm.get_cmap", "numpy.corrcoef", "numpy.isnan", "numpy.any", "numpy.linspace", "numpy.unique", "numpy.isin" ] ]
cooper-sloan/PATEC
[ "422e0b039d87e68b7aee98b89e4747953cab31dd" ]
[ "train_netflix_teachers.py" ]
[ "import deep_recommender as dr\nimport tensorflow as tf\nimport torch\nfrom DeepRecommender.reco_encoder.data import input_layer, new_input_layer\nfrom DeepRecommender.reco_encoder.model import model\nimport torch.optim as optim\nfrom torch.optim.lr_scheduler import MultiStepLR\nimport torch.nn as nn\nfrom torch.autograd import Variable\nimport copy\nimport time\nfrom pathlib import Path\nfrom math import sqrt, floor\nimport numpy as np\nimport os\nfrom os import listdir, path\n\nnb_teachers = 10\n\ndef train_teacher(nb_teachers, teacher_id):\n '''\n Very similar to code from DeepRecommender/run.py\n '''\n nf_data_dir = dr.config['path_to_train_data']\n nf_eval_data_dir = dr.config['path_to_eval_data']\n\n all_files = [path.join(nf_data_dir, f) for f in listdir(nf_data_dir)\n if path.isfile(path.join(nf_data_dir, f)) and f.endswith('.txt')]\n chunk_size = floor(len(all_files)/nb_teachers)\n start = teacher_id*chunk_size\n chunk = all_files[start:start+chunk_size]\n\n params['src_files'] = chunk\n print(\"Loading Training Data\")\n data_layer = new_input_layer.UserItemRecDataProviderNew(params=params,\n user_id_map=userIdMap,\n item_id_map=itemIdMap)\n print(\"Data loaded\")\n print(\"Total items found: {}\".format(len(data_layer.data.keys())))\n print(\"Vector dim: {}\".format(data_layer.vector_dim))\n\n print(\"Loading eval data\")\n eval_params = copy.deepcopy(params)\n del eval_params['src_files']\n # must set eval batch size to 1 to make sure no examples are missed\n eval_params['data_dir'] = nf_eval_data_dir\n eval_data_layer = input_layer.UserItemRecDataProvider(params=eval_params,\n user_id_map=userIdMap,\n item_id_map=itemIdMap)\n\n eval_data_layer.src_data = src_data_layer.data\n\n rencoder = model.AutoEncoder(layer_sizes=[data_layer.vector_dim] +\n [int(l) for l in dr.config['hidden_layers'].split(',')],\n nl_type=dr.config['non_linearity_type'],\n is_constrained=dr.config['constrained'],\n dp_drop_prob=dr.config['drop_prob'],\n last_layer_activations=dr.config['skip_last_layer_nl'])\n os.makedirs(dr.config['logdir'], exist_ok=True)\n model_checkpoint = dr.config['logdir'] + \"/model_%s_%s\" % (nb_teachers,\n teacher_id)\n path_to_model = Path(model_checkpoint)\n if path_to_model.is_file():\n print(\"Loading model from: {}\".format(model_checkpoint))\n rencoder.load_state_dict(torch.load(model_checkpoint))\n\n print('######################################################')\n print('######################################################')\n print('############# AutoEncoder Model: #####################')\n print(rencoder)\n print('######################################################')\n print('######################################################')\n\n gpu_ids = [int(g) for g in dr.config['gpu_ids'].split(',')]\n print('Using GPUs: {}'.format(gpu_ids))\n if len(gpu_ids)>1:\n rencoder = nn.DataParallel(rencoder,\n device_ids=gpu_ids)\n\n if use_gpu: rencoder = rencoder.cuda()\n\n if dr.config['optimizer'] == \"adam\":\n optimizer = optim.Adam(rencoder.parameters(),\n lr=dr.config['lr'],\n weight_decay=dr.config['weight_decay'])\n elif dr.config['optimizer'] == \"adagrad\":\n optimizer = optim.Adagrad(rencoder.parameters(),\n lr=dr.config['lr'],\n weight_decay=dr.config['weight_decay'])\n elif dr.config['optimizer'] == \"momentum\":\n optimizer = optim.SGD(rencoder.parameters(),\n lr=dr.config['lr'], momentum=0.9,\n weight_decay=dr.config['weight_decay'])\n scheduler = MultiStepLR(optimizer, milestones=[24, 36, 48, 66, 72], gamma=0.5)\n elif dr.config['optimizer'] == 
\"rmsprop\":\n optimizer = optim.RMSprop(rencoder.parameters(),\n lr=dr.config['lr'], momentum=0.9,\n weight_decay=dr.config['weight_decay'])\n else:\n raise ValueError('Unknown optimizer kind')\n\n t_loss = 0.0\n t_loss_denom = 0.0\n global_step = 0\n\n if dr.config['noise_prob'] > 0.0:\n dp = nn.Dropout(p=dr.config['noise_prob'])\n\n for epoch in range(dr.config['num_epochs']):\n print('Doing epoch {} of {}'.format(epoch, dr.config['num_epochs']))\n e_start_time = time.time()\n rencoder.train()\n total_epoch_loss = 0.0\n denom = 0.0\n if dr.config['optimizer'] == \"momentum\":\n scheduler.step()\n for i, mb in enumerate(data_layer.iterate_one_epoch()):\n inputs = Variable(mb.cuda().to_dense() if use_gpu else mb.to_dense())\n optimizer.zero_grad()\n outputs = rencoder(inputs)\n loss, num_ratings = model.MSEloss(outputs, inputs)\n loss = loss / num_ratings\n loss.backward()\n optimizer.step()\n global_step += 1\n t_loss += torch.Tensor.item(loss.data)\n t_loss_denom += 1\n\n if i % dr.config['summary_frequency'] == 0:\n print('[%d, %5d] RMSE: %.7f' % (epoch, i, sqrt(t_loss / t_loss_denom)))\n logger.scalar_summary(\"Training_RMSE\", sqrt(t_loss/t_loss_denom), global_step)\n t_loss = 0\n t_loss_denom = 0.0\n log_var_and_grad_summaries(logger, rencoder.encode_w, global_step, \"Encode_W\")\n log_var_and_grad_summaries(logger, rencoder.encode_b, global_step, \"Encode_b\")\n if not rencoder.is_constrained:\n log_var_and_grad_summaries(logger, rencoder.decode_w, global_step, \"Decode_W\")\n log_var_and_grad_summaries(logger, rencoder.decode_b, global_step, \"Decode_b\")\n\n total_epoch_loss += torch.Tensor.item(loss.data)\n denom += 1\n\n #if dr.config['aug_step'] > 0 and i % dr.config['aug_step'] == 0 and i > 0:\n if dr.config['aug_step'] > 0:\n # Magic data augmentation trick happen here\n for t in range(dr.config['aug_step']):\n inputs = Variable(outputs.data)\n if dr.config['noise_prob'] > 0.0:\n inputs = dp(inputs)\n optimizer.zero_grad()\n outputs = rencoder(inputs)\n loss, num_ratings = model.MSEloss(outputs, inputs)\n loss = loss / num_ratings\n loss.backward()\n optimizer.step()\n\n e_end_time = time.time()\n print('Total epoch {} finished in {} seconds with TRAINING RMSE loss: {}'\n .format(epoch, e_end_time - e_start_time, sqrt(total_epoch_loss/denom)))\n logger.scalar_summary(\"Training_RMSE_per_epoch\", sqrt(total_epoch_loss/denom), epoch)\n logger.scalar_summary(\"Epoch_time\", e_end_time - e_start_time, epoch)\n if epoch == dr.config['num_epochs'] - 1:\n eval_loss = do_eval(rencoder, eval_data_layer)\n print('Epoch {} EVALUATION LOSS: {}'.format(epoch, eval_loss))\n logger.scalar_summary(\"EVALUATION_RMSE\", eval_loss, epoch)\n\n print(\"Saving model to {}\".format(model_checkpoint + \".last\"))\n torch.save(rencoder.state_dict(), model_checkpoint + \".last\")\n\n return True\n\ndef main(argv=None):\n dr.load_maps()\n dr.load_src_data_layer()\n\n for i in range(nb_teachers):\n print(\"Training Teacher %s\" % i)\n train_teacher(nb_teachers, i)\n print('-'*160)\n\n print(\"All Teachers Trained\")\n\nif __name__ == '__main__':\n main()\n" ]
[ [ "torch.nn.Dropout", "torch.autograd.Variable", "torch.optim.lr_scheduler.MultiStepLR", "torch.load", "torch.Tensor.item", "torch.nn.DataParallel" ] ]
Mikma03/Optimization_in_Machine_Learning
[ "257d0455d4ae0b4fc7a762eda841a16611c49000", "257d0455d4ae0b4fc7a762eda841a16611c49000" ]
[ "Books/code/chapter_30/13_kfold_xgboost.py", "Books/code/chapter_14/17_sine.py" ]
[ "# xgboost with default hyperparameters for binary classification\nfrom numpy import mean\nfrom numpy import std\nfrom sklearn.datasets import make_classification\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn.model_selection import RepeatedStratifiedKFold\nfrom xgboost import XGBClassifier\n# define dataset\nX, y = make_classification(n_samples=1000, n_features=5, n_informative=2, n_redundant=1, random_state=1)\n# define model\nmodel = XGBClassifier(use_label_encoder=False, eval_metric=\"logloss\")\n# define evaluation procedure\ncv = RepeatedStratifiedKFold(n_splits=10, n_repeats=3, random_state=1)\n# evaluate model\nscores = cross_val_score(model, X, y, scoring='accuracy', cv=cv, n_jobs=-1)\n# report result\nprint('Mean Accuracy: %.3f (%.3f)' % (mean(scores), std(scores)))\n", "# fit a line to the economic data\nfrom numpy import sin\nfrom numpy import sqrt\nfrom numpy import arange\nfrom pandas import read_csv\nfrom scipy.optimize import curve_fit\nfrom matplotlib import pyplot\n\n# define the true objective function\ndef objective(x, a, b, c, d):\n\treturn a * sin(b - x) + c * x**2 + d\n\n# load the dataset\nurl = 'https://raw.githubusercontent.com/jbrownlee/Datasets/master/longley.csv'\ndataframe = read_csv(url, header=None)\ndata = dataframe.values\n# choose the input and output variables\nx, y = data[:, 4], data[:, -1]\n# curve fit\npopt, _ = curve_fit(objective, x, y)\n# summarize the parameter values\na, b, c, d = popt\nprint(popt)\n# plot input vs output\npyplot.scatter(x, y)\n# define a sequence of inputs between the smallest and largest known inputs\nx_line = arange(min(x), max(x), 1)\n# calculate the output for the range\ny_line = objective(x_line, a, b, c, d)\n# create a line plot for the mapping function\npyplot.plot(x_line, y_line, '--', color='red')\npyplot.show()\n" ]
[ [ "numpy.mean", "sklearn.model_selection.RepeatedStratifiedKFold", "numpy.std", "sklearn.model_selection.cross_val_score", "sklearn.datasets.make_classification" ], [ "numpy.sin", "scipy.optimize.curve_fit", "matplotlib.pyplot.plot", "matplotlib.pyplot.show", "matplotlib.pyplot.scatter", "pandas.read_csv" ] ]
sjwsl/taichi
[ "5211ec6c8f803d57327100b2aa1534740f86860d" ]
[ "python/taichi/lang/mesh.py" ]
[ "import json\n\nimport numpy as np\nfrom taichi._lib import core as _ti_core\nfrom taichi.lang import impl\nfrom taichi.lang.enums import Layout\nfrom taichi.lang.exception import TaichiSyntaxError\nfrom taichi.lang.field import Field, ScalarField\nfrom taichi.lang.matrix import MatrixField, _IntermediateMatrix\nfrom taichi.lang.struct import StructField\nfrom taichi.lang.util import python_scope\nfrom taichi.types import CompoundType\n\nimport taichi as ti\n\nMeshTopology = _ti_core.MeshTopology\nMeshElementType = _ti_core.MeshElementType\nMeshRelationType = _ti_core.MeshRelationType\nConvType = _ti_core.ConvType\nelement_order = _ti_core.element_order\nfrom_end_element_order = _ti_core.from_end_element_order\nto_end_element_order = _ti_core.to_end_element_order\nrelation_by_orders = _ti_core.relation_by_orders\ninverse_relation = _ti_core.inverse_relation\nelement_type_name = _ti_core.element_type_name\n\n\nclass MeshAttrType:\n def __init__(self, name, dtype, reorder, needs_grad):\n self.name = name\n self.dtype = dtype\n self.reorder = reorder\n self.needs_grad = needs_grad\n\n\nclass MeshReorderedScalarFieldProxy(ScalarField):\n def __init__(self, field: ScalarField, mesh_ptr: _ti_core.MeshPtr,\n element_type: MeshElementType, g2r_field: ScalarField):\n self.vars = field.vars\n self.host_accessors = field.host_accessors\n self.grad = field.grad\n\n self.mesh_ptr = mesh_ptr\n self.element_type = element_type\n self.g2r_field = g2r_field\n\n @python_scope\n def __setitem__(self, key, value):\n self.initialize_host_accessors()\n key = self.g2r_field[key]\n self.host_accessors[0].setter(value, *self.pad_key(key))\n\n @python_scope\n def __getitem__(self, key):\n self.initialize_host_accessors()\n key = self.g2r_field[key]\n return self.host_accessors[0].getter(*self.pad_key(key))\n\n\nclass MeshReorderedMatrixFieldProxy(MatrixField):\n def __init__(self, field: MatrixField, mesh_ptr: _ti_core.MeshPtr,\n element_type: MeshElementType, g2r_field: ScalarField):\n self.vars = field.vars\n self.host_accessors = field.host_accessors\n self.grad = field.grad\n self.n = field.n\n self.m = field.m\n\n self.mesh_ptr = mesh_ptr\n self.element_type = element_type\n self.g2r_field = g2r_field\n\n @python_scope\n def __setitem__(self, key, value):\n self.initialize_host_accessors()\n self[key].set_entries(value)\n\n @python_scope\n def __getitem__(self, key):\n self.initialize_host_accessors()\n key = self.g2r_field[key]\n key = self.pad_key(key)\n return _IntermediateMatrix(self.n, self.m, self.host_access(key))\n\n\nclass MeshElementField:\n def __init__(self, mesh_instance, _type, attr_dict, field_dict, g2r_field):\n self.mesh = mesh_instance\n self._type = _type\n self.attr_dict = attr_dict\n self.field_dict = field_dict\n self.g2r_field = g2r_field\n\n self.register_fields()\n\n @property\n def keys(self):\n return list(self.field_dict.keys())\n\n @property\n def members(self):\n return list(self.field_dict.values())\n\n @property\n def items(self):\n return self.field_dict.items()\n\n @staticmethod\n def make_getter(key):\n def getter(self):\n if key not in self.getter_dict:\n if self.attr_dict[key].reorder:\n if isinstance(self.field_dict[key], ScalarField):\n self.getter_dict[key] = MeshReorderedScalarFieldProxy(\n self.field_dict[key], self.mesh.mesh_ptr,\n self._type, self.g2r_field)\n elif isinstance(self.field_dict[key], MatrixField):\n self.getter_dict[key] = MeshReorderedMatrixFieldProxy(\n self.field_dict[key], self.mesh.mesh_ptr,\n self._type, self.g2r_field)\n else:\n 
self.getter_dict[key] = self.field_dict[key]\n            # Get an entry from custom struct by name.\n            _taichi_skip_traceback = 1\n            return self.getter_dict[key]\n\n        return getter\n\n    def register_fields(self):\n        self.getter_dict = {}\n        for k in self.keys:\n            setattr(MeshElementField, k,\n                    property(fget=MeshElementField.make_getter(k)))\n\n    def get_field_members(self):\n        field_members = []\n        for m in self.members:\n            assert isinstance(m, Field)\n            field_members += m.get_field_members()\n        return field_members\n\n    @python_scope\n    def copy_from(self, other):\n        assert isinstance(other, Field)\n        assert set(self.keys) == set(other.keys)\n        for k in self.keys:\n            self.field_dict[k].copy_from(other[k])\n\n    @python_scope\n    def fill(self, val):\n        for v in self.members:\n            v.fill(val)\n\n    def initialize_host_accessors(self):\n        for v in self.members:\n            v.initialize_host_accessors()\n\n    def get_member_field(self, key):\n        return self.field_dict[key]\n\n    @python_scope\n    def from_numpy(self, array_dict):\n        for k, v in self.items:\n            v.from_numpy(array_dict[k])\n\n    @python_scope\n    def from_torch(self, array_dict):\n        for k, v in self.items:\n            v.from_torch(array_dict[k])\n\n    @python_scope\n    def to_numpy(self):\n        return {k: v.to_numpy() for k, v in self.items}\n\n    @python_scope\n    def to_torch(self, device=None):\n        return {k: v.to_torch(device=device) for k, v in self.items}\n\n    @python_scope\n    def __len__(self):\n        return _ti_core.get_num_elements(self.mesh.mesh_ptr, self._type)\n\n\nclass MeshElement:\n    def __init__(self, _type, builder):\n        self.builder = builder\n        self._type = _type\n        self.layout = Layout.SOA\n        self.attr_dict = {}\n\n    def _SOA(self, soa=True):  # AOS/SOA\n        self.layout = Layout.SOA if soa else Layout.AOS\n\n    def _AOS(self, aos=True):\n        self.layout = Layout.AOS if aos else Layout.SOA\n\n    SOA = property(fget=_SOA)\n    AOS = property(fget=_AOS)\n\n    def place(\n        self,\n        members,\n        reorder=False,\n        needs_grad=False,\n    ):\n        self.builder.elements.add(self._type)\n        for key, dtype in members.items():\n            if key in {'verts', 'edges', 'faces', 'cells'}:\n                raise TaichiSyntaxError(\n                    f\"'{key}' cannot be used as an attribute name. 
It has been reserved as ti.Mesh's keyword.\"\n )\n self.attr_dict[key] = MeshAttrType(key, dtype, reorder, needs_grad)\n\n def build(self, mesh_instance, size, g2r_field):\n field_dict = {}\n\n for key, attr in self.attr_dict.items():\n if isinstance(attr.dtype, CompoundType):\n field_dict[key] = attr.dtype.field(shape=None,\n needs_grad=attr.needs_grad)\n else:\n field_dict[key] = impl.field(attr.dtype,\n shape=None,\n needs_grad=attr.needs_grad)\n\n if self.layout == Layout.SOA:\n for key, field in field_dict.items():\n impl.root.dense(impl.axes(0), size).place(field)\n if self.attr_dict[key].needs_grad:\n impl.root.dense(impl.axes(0), size).place(field.grad)\n elif len(field_dict) > 0:\n impl.root.dense(impl.axes(0),\n size).place(*tuple(field_dict.values()))\n grads = []\n for key, field in field_dict.items():\n if self.attr_dict[key].needs_grad:\n grads.append(field.grad)\n if len(grads) > 0:\n impl.root.dense(impl.axes(0), size).place(*grads)\n\n return MeshElementField(mesh_instance, self._type, self.attr_dict,\n field_dict, g2r_field)\n\n def link(self, element):\n assert isinstance(element, MeshElement)\n assert element.builder == self.builder\n self.builder.relations.add(tuple([self._type, element._type]))\n self.builder.elements.add(self._type)\n self.builder.elements.add(element._type)\n\n\n# Define the instance of the Mesh Type, stores the field (type and data) info\nclass MeshInstance:\n def __init__(self, _type):\n self._type = _type\n self.mesh_ptr = _ti_core.create_mesh()\n\n def set_owned_offset(self, element_type: MeshElementType,\n owned_offset: ScalarField):\n _ti_core.set_owned_offset(self.mesh_ptr, element_type,\n owned_offset.vars[0].ptr.snode())\n\n def set_total_offset(self, element_type: MeshElementType,\n total_offset: ScalarField):\n _ti_core.set_total_offset(self.mesh_ptr, element_type,\n total_offset.vars[0].ptr.snode())\n\n def set_index_mapping(self, element_type: MeshElementType,\n conv_type: ConvType, mapping: ScalarField):\n _ti_core.set_index_mapping(self.mesh_ptr, element_type, conv_type,\n mapping.vars[0].ptr.snode())\n\n def set_num_patches(self, num_patches: int):\n _ti_core.set_num_patches(self.mesh_ptr, num_patches)\n\n def set_patch_max_element_num(self, element_type: MeshElementType,\n max_element_num: int):\n _ti_core.set_patch_max_element_num(self.mesh_ptr, element_type,\n max_element_num)\n\n def set_relation_fixed(self, rel_type: MeshRelationType,\n value: ScalarField):\n _ti_core.set_relation_fixed(self.mesh_ptr, rel_type,\n value.vars[0].ptr.snode())\n\n def set_relation_dynamic(self, rel_type: MeshRelationType,\n value: ScalarField, offset: ScalarField):\n _ti_core.set_relation_dynamic(self.mesh_ptr, rel_type,\n value.vars[0].ptr.snode(),\n offset.vars[0].ptr.snode())\n\n def add_mesh_attribute(self, element_type, snode, reorder_type):\n _ti_core.add_mesh_attribute(self.mesh_ptr, element_type, snode,\n reorder_type)\n\n\nclass MeshMetadata:\n def __init__(self, filename):\n with open(filename, \"r\") as fi:\n data = json.loads(fi.read())\n\n self.num_patches = data[\"num_patches\"]\n\n self.element_fields = {}\n self.relation_fields = {}\n self.num_elements = {}\n self.max_num_per_patch = {}\n\n for element in data[\"elements\"]:\n element_type = MeshElementType(element[\"order\"])\n self.num_elements[element_type] = element[\"num\"]\n self.max_num_per_patch[element_type] = element[\"max_num_per_patch\"]\n\n element[\"l2g_mapping\"] = np.array(element[\"l2g_mapping\"])\n element[\"l2r_mapping\"] = np.array(element[\"l2r_mapping\"])\n 
element[\"g2r_mapping\"] = np.array(element[\"g2r_mapping\"])\n self.element_fields[element_type] = {}\n self.element_fields[element_type][\"owned\"] = impl.field(\n dtype=ti.i32, shape=self.num_patches + 1)\n self.element_fields[element_type][\"total\"] = impl.field(\n dtype=ti.i32, shape=self.num_patches + 1)\n self.element_fields[element_type][\"l2g\"] = impl.field(\n dtype=ti.i32, shape=element[\"l2g_mapping\"].shape[0])\n self.element_fields[element_type][\"l2r\"] = impl.field(\n dtype=ti.i32, shape=element[\"l2r_mapping\"].shape[0])\n self.element_fields[element_type][\"g2r\"] = impl.field(\n dtype=ti.i32, shape=element[\"g2r_mapping\"].shape[0])\n\n for relation in data[\"relations\"]:\n from_order = relation[\"from_order\"]\n to_order = relation[\"to_order\"]\n rel_type = MeshRelationType(\n relation_by_orders(from_order, to_order))\n self.relation_fields[rel_type] = {}\n self.relation_fields[rel_type][\"value\"] = impl.field(\n dtype=ti.i32, shape=len(relation[\"value\"]))\n if from_order <= to_order:\n self.relation_fields[rel_type][\"offset\"] = impl.field(\n dtype=ti.i32, shape=len(relation[\"offset\"]))\n\n for element in data[\"elements\"]:\n element_type = MeshElementType(element[\"order\"])\n self.element_fields[element_type][\"owned\"].from_numpy(\n np.array(element[\"owned_offsets\"]))\n self.element_fields[element_type][\"total\"].from_numpy(\n np.array(element[\"total_offsets\"]))\n self.element_fields[element_type][\"l2g\"].from_numpy(\n element[\"l2g_mapping\"])\n self.element_fields[element_type][\"l2r\"].from_numpy(\n element[\"l2r_mapping\"])\n self.element_fields[element_type][\"g2r\"].from_numpy(\n element[\"g2r_mapping\"])\n\n for relation in data[\"relations\"]:\n from_order = relation[\"from_order\"]\n to_order = relation[\"to_order\"]\n rel_type = MeshRelationType(\n relation_by_orders(from_order, to_order))\n self.relation_fields[rel_type][\"value\"].from_numpy(\n np.array(relation[\"value\"]))\n if from_order <= to_order:\n self.relation_fields[rel_type][\"offset\"].from_numpy(\n np.array(relation[\"offset\"]))\n\n self.attrs = {}\n self.attrs[\"x\"] = np.array(data[\"attrs\"][\"x\"]).reshape(-1, 3)\n\n\n# Define the Mesh Type, stores the field type info\nclass MeshBuilder:\n def __init__(self, topology):\n if not ti.is_extension_supported(ti.cfg.arch, ti.extension.mesh):\n raise Exception('Backend ' + str(ti.cfg.arch) +\n ' doesn\\'t support MeshTaichi extension')\n\n self.topology = topology\n self.verts = MeshElement(MeshElementType.Vertex, self)\n self.edges = MeshElement(MeshElementType.Edge, self)\n self.faces = MeshElement(MeshElementType.Face, self)\n if topology == MeshTopology.Tetrahedron:\n self.cells = MeshElement(MeshElementType.Cell, self)\n\n self.elements = set()\n self.relations = set()\n\n def build(self, metadata: MeshMetadata):\n instance = MeshInstance(self)\n instance.fields = {}\n\n instance.set_num_patches(metadata.num_patches)\n\n for element in self.elements:\n _ti_core.set_num_elements(instance.mesh_ptr, element,\n metadata.num_elements[element])\n instance.set_patch_max_element_num(\n element, metadata.max_num_per_patch[element])\n\n element_name = element_type_name(element)\n setattr(\n instance, element_name,\n getattr(self, element_name).build(\n instance, metadata.num_elements[element],\n metadata.element_fields[element][\"g2r\"]))\n instance.fields[element] = getattr(instance, element_name)\n\n instance.set_owned_offset(\n element, metadata.element_fields[element][\"owned\"])\n instance.set_total_offset(\n element, 
metadata.element_fields[element][\"total\"])\n instance.set_index_mapping(element, ConvType.l2g,\n metadata.element_fields[element][\"l2g\"])\n instance.set_index_mapping(element, ConvType.l2r,\n metadata.element_fields[element][\"l2r\"])\n instance.set_index_mapping(element, ConvType.g2r,\n metadata.element_fields[element][\"g2r\"])\n\n for relation in self.relations:\n from_order = element_order(relation[0])\n to_order = element_order(relation[1])\n rel_type = MeshRelationType(\n relation_by_orders(from_order, to_order))\n if from_order <= to_order:\n instance.set_relation_dynamic(\n rel_type, metadata.relation_fields[rel_type][\"value\"],\n metadata.relation_fields[rel_type][\"offset\"])\n else:\n instance.set_relation_fixed(\n rel_type, metadata.relation_fields[rel_type][\"value\"])\n\n if \"x\" in instance.verts.attr_dict: # pylint: disable=E1101\n instance.verts.x.from_numpy(metadata.attrs[\"x\"]) # pylint: disable=E1101\n\n return instance\n\n\n# Mesh First Class\nclass Mesh:\n def __init__(self):\n pass\n\n @staticmethod\n def Tet():\n return MeshBuilder(MeshTopology.Tetrahedron)\n\n @staticmethod\n def Tri():\n return MeshBuilder(MeshTopology.Triangle)\n\n @staticmethod\n def load_meta(filename):\n return MeshMetadata(filename)\n\n\ndef TriMesh():\n return Mesh.Tri()\n\n\ndef TetMesh():\n return Mesh.Tet()\n\n\nclass MeshElementFieldProxy:\n def __init__(self, mesh: MeshInstance, element_type: MeshElementType,\n entry_expr: impl.Expr):\n self.mesh = mesh\n self.element_type = element_type\n self.entry_expr = entry_expr\n\n element_field = self.mesh.fields[self.element_type]\n for key, attr in element_field.field_dict.items():\n global_entry_expr = impl.Expr(\n _ti_core.get_index_conversion(\n self.mesh.mesh_ptr, element_type, entry_expr,\n ConvType.l2r if element_field.attr_dict[key].reorder else\n ConvType.l2g)) # transform index space\n global_entry_expr_group = impl.make_expr_group(\n *tuple([global_entry_expr]))\n if isinstance(attr, MatrixField):\n setattr(\n self, key,\n _IntermediateMatrix(attr.n, attr.m, [\n impl.Expr(\n _ti_core.subscript(e.ptr, global_entry_expr_group))\n for e in attr.get_field_members()\n ]))\n elif isinstance(attr, StructField):\n raise RuntimeError('ti.Mesh has not support StructField yet')\n else: # isinstance(attr, Field)\n var = attr.get_field_members()[0].ptr\n setattr(\n self, key,\n impl.Expr(_ti_core.subscript(var,\n global_entry_expr_group)))\n\n for element_type in self.mesh._type.elements:\n setattr(self, element_type_name(element_type),\n impl.mesh_relation_access(self.mesh, self, element_type))\n\n @property\n def ptr(self):\n return self.entry_expr\n\n @property\n def id(self): # return the global non-reordered index\n l2g_expr = impl.Expr(\n _ti_core.get_index_conversion(self.mesh.mesh_ptr,\n self.element_type, self.entry_expr,\n ConvType.l2g))\n return l2g_expr\n\n\nclass MeshRelationAccessProxy:\n def __init__(self, mesh: MeshInstance, from_index: impl.Expr,\n to_element_type: MeshElementType):\n self.mesh = mesh\n self.from_index = from_index\n self.to_element_type = to_element_type\n\n @property\n def size(self):\n return impl.Expr(\n _ti_core.get_relation_size(self.mesh.mesh_ptr, self.from_index.ptr,\n self.to_element_type))\n\n def subscript(self, *indices):\n assert len(indices) == 1\n entry_expr = _ti_core.get_relation_access(self.mesh.mesh_ptr,\n self.from_index.ptr,\n self.to_element_type,\n impl.Expr(indices[0]).ptr)\n entry_expr.type_check()\n return MeshElementFieldProxy(self.mesh, self.to_element_type,\n entry_expr)\n" ]
[ [ "numpy.array" ] ]
ashigirl96/stable-baselines
[ "0c3478eb9917c0357131913215df7abca6c8d566" ]
[ "stable_baselines/common/tf_util.py" ]
[ "import copy\nimport os\nimport functools\nimport collections\nimport multiprocessing\n\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.python.client import device_lib\n\nfrom stable_baselines import logger\n\n\ndef switch(condition, then_expression, else_expression):\n \"\"\"\n Switches between two operations depending on a scalar value (int or bool).\n Note that both `then_expression` and `else_expression`\n should be symbolic tensors of the *same shape*.\n\n :param condition: (TensorFlow Tensor) scalar tensor.\n :param then_expression: (TensorFlow Operation)\n :param else_expression: (TensorFlow Operation)\n :return: (TensorFlow Operation) the switch output\n \"\"\"\n x_shape = copy.copy(then_expression.get_shape())\n out_tensor = tf.cond(tf.cast(condition, 'bool'),\n lambda: then_expression,\n lambda: else_expression)\n out_tensor.set_shape(x_shape)\n return out_tensor\n\n\n# ================================================================\n# Extras\n# ================================================================\n\ndef leaky_relu(tensor, leak=0.2):\n \"\"\"\n Leaky ReLU\n http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf\n\n :param tensor: (float) the input value\n :param leak: (float) the leaking coeficient when the function is saturated\n :return: (float) Leaky ReLU output\n \"\"\"\n f_1 = 0.5 * (1 + leak)\n f_2 = 0.5 * (1 - leak)\n return f_1 * tensor + f_2 * abs(tensor)\n\n\n# ================================================================\n# Mathematical utils\n# ================================================================\n\ndef huber_loss(tensor, delta=1.0):\n \"\"\"\n Reference: https://en.wikipedia.org/wiki/Huber_loss\n\n :param tensor: (TensorFlow Tensor) the input value\n :param delta: (float) huber loss delta value\n :return: (TensorFlow Tensor) huber loss output\n \"\"\"\n return tf.where(\n tf.abs(tensor) < delta,\n tf.square(tensor) * 0.5,\n delta * (tf.abs(tensor) - 0.5 * delta)\n )\n\n\n# ================================================================\n# Global session\n# ================================================================\n\ndef make_session(num_cpu=None, make_default=False, graph=None):\n \"\"\"\n Returns a session that will use <num_cpu> CPU's only\n\n :param num_cpu: (int) number of CPUs to use for TensorFlow\n :param make_default: (bool) if this should return an InteractiveSession or a normal Session\n :param graph: (TensorFlow Graph) the graph of the session\n :return: (TensorFlow session)\n \"\"\"\n if num_cpu is None:\n num_cpu = int(os.getenv('RCALL_NUM_CPU', multiprocessing.cpu_count()))\n tf_config = tf.ConfigProto(\n allow_soft_placement=True,\n inter_op_parallelism_threads=num_cpu,\n intra_op_parallelism_threads=num_cpu)\n # Prevent tensorflow from taking all the gpu memory\n tf_config.gpu_options.allow_growth = True\n # tf_config.gpu_options.visible_device_list ='1'\n if make_default:\n return tf.InteractiveSession(config=tf_config, graph=graph)\n else:\n return tf.Session(config=tf_config, graph=graph)\n\n\ndef single_threaded_session(make_default=False, graph=None):\n \"\"\"\n Returns a session which will only use a single CPU\n\n :param make_default: (bool) if this should return an InteractiveSession or a normal Session\n :param graph: (TensorFlow Graph) the graph of the session\n :return: (TensorFlow session)\n \"\"\"\n return make_session(num_cpu=1, make_default=make_default, graph=graph)\n\n\ndef in_session(func):\n \"\"\"\n wrappes a function so that it is in a TensorFlow Session\n\n :param 
func: (function) the function to wrap\n :return: (function)\n \"\"\"\n\n @functools.wraps(func)\n def newfunc(*args, **kwargs):\n with tf.Session():\n func(*args, **kwargs)\n\n return newfunc\n\n\nALREADY_INITIALIZED = set()\n\n\ndef initialize(sess=None):\n \"\"\"\n Initialize all the uninitialized variables in the global scope.\n\n :param sess: (TensorFlow Session)\n \"\"\"\n if sess is None:\n sess = tf.get_default_session()\n new_variables = set(tf.global_variables()) - ALREADY_INITIALIZED\n sess.run(tf.variables_initializer(new_variables))\n ALREADY_INITIALIZED.update(new_variables)\n\n\n# ================================================================\n# Model components\n# ================================================================\n\ndef normc_initializer(std=1.0, axis=0):\n \"\"\"\n Return a parameter initializer for TensorFlow\n\n :param std: (float) standard deviation\n :param axis: (int) the axis to normalize on\n :return: (function)\n \"\"\"\n\n def _initializer(shape, dtype=None, partition_info=None):\n out = np.random.randn(*shape).astype(np.float32)\n out *= std / np.sqrt(np.square(out).sum(axis=axis, keepdims=True))\n return tf.constant(out)\n\n return _initializer\n\n\ndef conv2d(input_tensor, num_filters, name, filter_size=(3, 3), stride=(1, 1),\n pad=\"SAME\", dtype=tf.float32, collections=None, summary_tag=None):\n \"\"\"\n Creates a 2d convolutional layer for TensorFlow\n\n :param input_tensor: (TensorFlow Tensor) The input tensor for the convolution\n :param num_filters: (int) The number of filters\n :param name: (str) The TensorFlow variable scope\n :param filter_size: (tuple) The filter size\n :param stride: (tuple) The stride of the convolution\n :param pad: (str) The padding type ('VALID' or 'SAME')\n :param dtype: (type) The data type for the Tensors\n :param collections: (list) List of graph collections keys to add the Variable to\n :param summary_tag: (str) image summary name, can be None for no image summary\n :return: (TensorFlow Tensor) 2d convolutional layer\n \"\"\"\n with tf.variable_scope(name):\n stride_shape = [1, stride[0], stride[1], 1]\n filter_shape = [filter_size[0], filter_size[1], int(input_tensor.get_shape()[3]), num_filters]\n\n # there are \"num input feature maps * filter height * filter width\"\n # inputs to each hidden unit\n fan_in = intprod(filter_shape[:3])\n # each unit in the lower layer receives a gradient from:\n # \"num output feature maps * filter height * filter width\" /\n # pooling size\n fan_out = intprod(filter_shape[:2]) * num_filters\n # initialize weights with random weights\n w_bound = np.sqrt(6. / (fan_in + fan_out))\n\n weight = tf.get_variable(\"W\", filter_shape, dtype, tf.random_uniform_initializer(-w_bound, w_bound),\n collections=collections)\n bias = tf.get_variable(\"b\", [1, 1, 1, num_filters], initializer=tf.zeros_initializer(),\n collections=collections)\n\n if summary_tag is not None:\n tf.summary.image(summary_tag,\n tf.transpose(tf.reshape(weight, [filter_size[0], filter_size[1], -1, 1]), [2, 0, 1, 3]),\n max_outputs=10)\n\n return tf.nn.conv2d(input_tensor, weight, stride_shape, pad) + bias\n\n\n# ================================================================\n# Theano-like Function\n# ================================================================\n\ndef function(inputs, outputs, updates=None, givens=None):\n \"\"\"\n Take a bunch of tensorflow placeholders and expressions\n computed based on those placeholders and produces f(inputs) -> outputs. 
Function f takes\n values to be fed to the input's placeholders and produces the values of the expressions\n in outputs. Just like a Theano function. \n\n Input values can be passed in the same order as inputs or can be provided as kwargs based\n on placeholder name (passed to constructor or accessible via placeholder.op.name).\n\n Example:\n >>> x = tf.placeholder(tf.int32, (), name=\"x\")\n >>> y = tf.placeholder(tf.int32, (), name=\"y\")\n >>> z = 3 * x + 2 * y\n >>> lin = function([x, y], z, givens={y: 0})\n >>> with single_threaded_session():\n >>> initialize()\n >>> assert lin(2) == 6\n >>> assert lin(x=3) == 9\n >>> assert lin(2, 2) == 10\n \n :param inputs: (TensorFlow Tensor or Object with make_feed_dict) list of input arguments\n :param outputs: (TensorFlow Tensor) list of outputs or a single output to be returned from function. Returned\n value will also have the same shape.\n :param updates: ([tf.Operation] or tf.Operation)\n list of update functions or single update function that will be run whenever\n the function is called. The return is ignored. \n :param givens: (dict) the values known for the output\n \"\"\"\n if isinstance(outputs, list):\n return _Function(inputs, outputs, updates, givens=givens)\n elif isinstance(outputs, (dict, collections.OrderedDict)):\n func = _Function(inputs, outputs.values(), updates, givens=givens)\n return lambda *args, **kwargs: type(outputs)(zip(outputs.keys(), func(*args, **kwargs)))\n else:\n func = _Function(inputs, [outputs], updates, givens=givens)\n return lambda *args, **kwargs: func(*args, **kwargs)[0]\n\n\nclass _Function(object):\n def __init__(self, inputs, outputs, updates, givens):\n \"\"\"\n Theano like function\n\n :param inputs: (TensorFlow Tensor or Object with make_feed_dict) list of input arguments\n :param outputs: (TensorFlow Tensor) list of outputs or a single output to be returned from function. Returned\n value will also have the same shape.\n :param updates: ([tf.Operation] or tf.Operation)\n list of update functions or single update function that will be run whenever\n the function is called. The return is ignored. 
\n    :param givens: (dict) the values known for the output\n        \"\"\"\n        for inpt in inputs:\n            if not hasattr(inpt, 'make_feed_dict') and not (isinstance(inpt, tf.Tensor) and len(inpt.op.inputs) == 0):\n                assert False, \"inputs should all be placeholders, constants, or have a make_feed_dict method\"\n        self.inputs = inputs\n        updates = updates or []\n        self.update_group = tf.group(*updates)\n        self.outputs_update = list(outputs) + [self.update_group]\n        self.givens = {} if givens is None else givens\n\n    @classmethod\n    def _feed_input(cls, feed_dict, inpt, value):\n        if hasattr(inpt, 'make_feed_dict'):\n            feed_dict.update(inpt.make_feed_dict(value))\n        else:\n            feed_dict[inpt] = value\n\n    def __call__(self, *args, sess=None, **kwargs):\n        assert len(args) <= len(self.inputs), \"Too many arguments provided\"\n        if sess is None:\n            sess = tf.get_default_session()\n        feed_dict = {}\n        # Update the args\n        for inpt, value in zip(self.inputs, args):\n            self._feed_input(feed_dict, inpt, value)\n        # Update feed dict with givens.\n        for inpt in self.givens:\n            feed_dict[inpt] = feed_dict.get(inpt, self.givens[inpt])\n        results = sess.run(self.outputs_update, feed_dict=feed_dict, **kwargs)[:-1]\n        return results\n\n\n# ================================================================\n# Flat vectors\n# ================================================================\n\ndef var_shape(tensor):\n    \"\"\"\n    get TensorFlow Tensor shape\n\n    :param tensor: (TensorFlow Tensor) the input tensor\n    :return: ([int]) the shape\n    \"\"\"\n    out = tensor.get_shape().as_list()\n    assert all(isinstance(a, int) for a in out), \\\n        \"shape function assumes that shape is fully known\"\n    return out\n\n\ndef numel(tensor):\n    \"\"\"\n    get TensorFlow Tensor's number of elements\n\n    :param tensor: (TensorFlow Tensor) the input tensor\n    :return: (int) the number of elements\n    \"\"\"\n    return intprod(var_shape(tensor))\n\n\ndef intprod(tensor):\n    \"\"\"\n    calculates the product of all the elements in a list\n\n    :param tensor: ([Number]) the list of elements\n    :return: (int) the product truncated\n    \"\"\"\n    return int(np.prod(tensor))\n\n\ndef flatgrad(loss, var_list, clip_norm=None):\n    \"\"\"\n    calculates the gradient and flattens it\n\n    :param loss: (float) the loss value\n    :param var_list: ([TensorFlow Tensor]) the variables\n    :param clip_norm: (float) clip the gradients (disabled if None)\n    :return: ([TensorFlow Tensor]) flattened gradient\n    \"\"\"\n    grads = tf.gradients(loss, var_list)\n    if clip_norm is not None:\n        grads = [tf.clip_by_norm(grad, clip_norm=clip_norm) for grad in grads]\n    return tf.concat(axis=0, values=[\n        tf.reshape(grad if grad is not None else tf.zeros_like(v), [numel(v)])\n        for (v, grad) in zip(var_list, grads)\n    ])\n\n\nclass SetFromFlat(object):\n    def __init__(self, var_list, dtype=tf.float32, sess=None):\n        \"\"\"\n        Set the parameters from a flat vector\n\n        :param var_list: ([TensorFlow Tensor]) the variables\n        :param dtype: (type) the type for the placeholder\n        :param sess: (TensorFlow Session)\n        \"\"\"\n        shapes = list(map(var_shape, var_list))\n        total_size = np.sum([intprod(shape) for shape in shapes])\n\n        self.theta = theta = tf.placeholder(dtype, [total_size])\n        start = 0\n        assigns = []\n        for (shape, _var) in zip(shapes, var_list):\n            size = intprod(shape)\n            assigns.append(tf.assign(_var, tf.reshape(theta[start:start + size], shape)))\n            start += size\n        self.operation = tf.group(*assigns)\n        self.sess = sess\n\n    def __call__(self, theta):\n        if self.sess is None:\n            return tf.get_default_session().run(self.operation, 
feed_dict={self.theta: theta})\n        else:\n            return self.sess.run(self.operation, feed_dict={self.theta: theta})\n\n\nclass GetFlat(object):\n    def __init__(self, var_list, sess=None):\n        \"\"\"\n        Get the parameters as a flat vector\n\n        :param var_list: ([TensorFlow Tensor]) the variables\n        :param sess: (TensorFlow Session)\n        \"\"\"\n        self.operation = tf.concat(axis=0, values=[tf.reshape(v, [numel(v)]) for v in var_list])\n        self.sess = sess\n\n    def __call__(self):\n        if self.sess is None:\n            return tf.get_default_session().run(self.operation)\n        else:\n            return self.sess.run(self.operation)\n\n\ndef flattenallbut0(tensor):\n    \"\"\"\n    flatten all the dimensions except the first one\n\n    :param tensor: (TensorFlow Tensor) the input tensor\n    :return: (TensorFlow Tensor) the flattened tensor\n    \"\"\"\n    return tf.reshape(tensor, [-1, intprod(tensor.get_shape().as_list()[1:])])\n\n\n# ================================================================\n# Diagnostics\n# ================================================================\n\ndef display_var_info(_vars):\n    \"\"\"\n    log variable information, for debug purposes\n\n    :param _vars: ([TensorFlow Tensor]) the variables\n    \"\"\"\n    count_params = 0\n    for _var in _vars:\n        name = _var.name\n        if \"/Adam\" in name or \"beta1_power\" in name or \"beta2_power\" in name:\n            continue\n        v_params = np.prod(_var.shape.as_list())\n        count_params += v_params\n        if \"/b:\" in name or \"/biases\" in name:\n            continue  # Wx+b, bias is not interesting to look at => count params, but not print\n        logger.info(\"   %s%s %i params %s\" % (name, \" \" * (55 - len(name)), v_params, str(_var.shape)))\n\n    logger.info(\"Total model parameters: %0.2f million\" % (count_params * 1e-6))\n\n\ndef get_available_gpus():\n    \"\"\"\n    Return a list of all the available GPUs\n\n    :return: ([str]) the GPUs available\n    \"\"\"\n    # recipe from here:\n    # https://stackoverflow.com/questions/38559755/how-to-get-current-available-gpus-in-tensorflow?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa\n    local_device_protos = device_lib.list_local_devices()\n    return [x.name for x in local_device_protos if x.device_type == 'GPU']\n\n\n# ================================================================\n# Saving variables\n# ================================================================\n\ndef load_state(fname, sess=None, var_list=None):\n    \"\"\"\n    Load a TensorFlow saved model\n\n    :param fname: (str) the graph name\n    :param sess: (TensorFlow Session) the session, if None: get_default_session()\n    :param var_list: ([TensorFlow Tensor] or dict(str: TensorFlow Tensor)) A list of Variable/SaveableObject,\n        or a dictionary mapping names to SaveableObject`s. If ``None``, defaults to the list of all saveable objects.\n    \"\"\"\n    if sess is None:\n        sess = tf.get_default_session()\n\n    # avoid crashing when loading the direct name without explicitly adding the root folder\n    if os.path.dirname(fname) == '':\n        fname = os.path.join('./', fname)\n\n    saver = tf.train.Saver(var_list=var_list)\n    saver.restore(sess, fname)\n\n\ndef save_state(fname, sess=None, var_list=None):\n    \"\"\"\n    Save a TensorFlow model\n\n    :param fname: (str) the graph name\n    :param sess: (TensorFlow Session) The tf session, if None, get_default_session()\n    :param var_list: ([TensorFlow Tensor] or dict(str: TensorFlow Tensor)) A list of Variable/SaveableObject,\n        or a dictionary mapping names to SaveableObject`s. 
If ``None``, defaults to the list of all saveable objects.\n \"\"\"\n if sess is None:\n sess = tf.get_default_session()\n\n dir_name = os.path.dirname(fname)\n # avoid crashing when saving the direct name without explicitly adding the root folder\n if dir_name == '':\n dir_name = './'\n fname = os.path.join(dir_name, fname)\n os.makedirs(dir_name, exist_ok=True)\n\n saver = tf.train.Saver(var_list=var_list)\n saver.save(sess, fname)\n\n\n# ================================================================\n# retrieving variables\n# ================================================================\n\ndef get_trainable_vars(name):\n \"\"\"\n returns the trainable variables\n\n :param name: (str) the scope\n :return: ([TensorFlow Variable])\n \"\"\"\n return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=name)\n\n\ndef get_globals_vars(name):\n \"\"\"\n returns the global variables\n\n :param name: (str) the scope\n :return: ([TensorFlow Variable])\n \"\"\"\n return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=name)\n\n\ndef outer_scope_getter(scope, new_scope=\"\"):\n \"\"\"\n remove a scope layer for the getter\n\n :param scope: (str) the layer to remove\n :param new_scope: (str) optional replacement name\n :return: (function (function, str, ``*args``, ``**kwargs``): TensorFlow Tensor)\n \"\"\"\n def _getter(getter, name, *args, **kwargs):\n name = name.replace(scope + \"/\", new_scope, 1)\n val = getter(name, *args, **kwargs)\n return val\n return _getter\n" ]
[ [ "tensorflow.get_default_session", "tensorflow.nn.conv2d", "tensorflow.group", "tensorflow.gradients", "tensorflow.reshape", "tensorflow.clip_by_norm", "tensorflow.zeros_like", "tensorflow.cast", "tensorflow.InteractiveSession", "tensorflow.train.Saver", "tensorflow.global_variables", "tensorflow.ConfigProto", "tensorflow.constant", "tensorflow.variable_scope", "numpy.prod", "numpy.sqrt", "tensorflow.get_collection", "numpy.square", "tensorflow.abs", "tensorflow.Session", "numpy.random.randn", "tensorflow.python.client.device_lib.list_local_devices", "tensorflow.placeholder", "tensorflow.zeros_initializer", "tensorflow.random_uniform_initializer", "tensorflow.variables_initializer", "tensorflow.square" ] ]
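A minimal usage sketch for the flat-vector helpers in the entry above (illustrative, not part of the dataset row). It assumes graph-mode TensorFlow 1.x and that the GetFlat and SetFromFlat classes defined above are in scope; the variable shapes are arbitrary.

import numpy as np
import tensorflow as tf

with tf.Graph().as_default(), tf.Session() as sess:
    # two dummy variables holding 2*3 + 4 = 10 parameters in total
    a = tf.Variable(np.zeros((2, 3), dtype=np.float32))
    b = tf.Variable(np.zeros((4,), dtype=np.float32))
    get_flat = GetFlat([a, b], sess=sess)           # concatenates flattened variables
    set_from_flat = SetFromFlat([a, b], sess=sess)  # scatters a flat vector back
    sess.run(tf.variables_initializer([a, b]))

    theta = np.arange(10, dtype=np.float32)
    set_from_flat(theta)                   # write the flat vector into a and b
    assert np.allclose(get_flat(), theta)  # round-trips unchanged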
drichardson/examples
[ "d8b285db4ad1cfd9a92091deab2eb385748f97c8" ]
[ "python/scipy/ode2.py" ]
[ "# Following along to https://www.youtube.com/watch?v=VV3BnroVjZo\n#\n# Example 2: Pass additional arguments to model\n#\nimport numpy as np\nfrom scipy.integrate import odeint\nimport matplotlib.pyplot as plt\n\n# function that returns dy/dt\ndef model(y, t, k):\n dydt = -k * y\n return dydt\n\n# initial condition\ny0 = 5\n\n# time points\nt = np.linspace(0, 20, 50)\n\n# solve ODE\nk = 0.1\ny1 = odeint(model, y0, t, args=(k,))\nk = 0.2\ny2 = odeint(model, y0, t, args=(k,))\nk = 0.5\ny3 = odeint(model, y0, t, args=(k,))\n\n# plot results\nplt.plot(t, y1, 'r-', linewidth=2, label='k=0.1')\nplt.plot(t, y2, 'b--', linewidth=2, label='k=0.2')\nplt.plot(t, y3, 'g:', linewidth=2, label='k=0.5')\nplt.xlabel('time')\nplt.ylabel('y(t)')\nplt.legend(loc='best')\nplt.show()\n" ]
[ [ "scipy.integrate.odeint", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "matplotlib.pyplot.legend", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.show", "numpy.linspace" ] ]
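A quick cross-check of the entry above (a sketch, not part of the dataset row): dy/dt = -k*y has the closed-form solution y(t) = y0*exp(-k*t), so the odeint output can be verified directly.

import numpy as np
from scipy.integrate import odeint

def model(y, t, k):
    return -k * y  # same right-hand side as in the file above

t = np.linspace(0, 20, 50)
y_num = odeint(model, 5, t, args=(0.1,)).ravel()  # numerical solution for k=0.1
y_exact = 5 * np.exp(-0.1 * t)                    # analytic solution
assert np.allclose(y_num, y_exact, atol=1e-5)     # agreement within solver tolerance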
ikergarcia1996/jiant-v1-legacy
[ "d1b5204bde2f03067e3f5176b6eedb8f03dc17fb" ]
[ "jiant/evaluate.py" ]
[ "\"\"\" Helper functions to evaluate a model on a dataset \"\"\"\nimport json\nimport logging as log\nimport os\nimport time\nfrom collections import defaultdict\nfrom csv import QUOTE_MINIMAL, QUOTE_NONE\nfrom typing import Dict, Iterable, List, Sequence, Tuple\n\nimport pandas as pd\nimport torch\nfrom allennlp.nn.util import move_to_device\nfrom allennlp.data.iterators import BasicIterator\nfrom jiant import tasks as tasks_module\nfrom jiant.tasks.tasks import (\n BooleanQuestionTask,\n CommitmentTask,\n COPATask,\n RTESuperGLUETask,\n WiCTask,\n WinogradCoreferenceTask,\n GLUEDiagnosticTask,\n)\nfrom jiant.tasks.qa import MultiRCTask, ReCoRDTask, QASRLTask\nfrom jiant.tasks.edge_probing import EdgeProbingTask\nfrom jiant.utils.utils import get_output_attribute\n\n\nLOG_INTERVAL = 30\n\n\ndef _format_preds(preds):\n if isinstance(preds, (list, torch.Tensor)):\n preds = _coerce_list(preds)\n assert isinstance(preds, list), \"Convert predictions to list!\"\n cols = {\"preds\": preds}\n elif isinstance(preds, dict):\n cols = {}\n for k, v in preds.items():\n cols[f\"preds_{k}\"] = _coerce_list(v)\n else:\n raise TypeError(type(preds))\n return cols\n\n\ndef _coerce_list(preds) -> List:\n if isinstance(preds, torch.Tensor):\n return preds.data.tolist()\n else:\n return list(preds)\n\n\ndef parse_write_preds_arg(write_preds_arg: str) -> List[str]:\n if write_preds_arg == 0:\n return []\n elif write_preds_arg == 1:\n return [\"test\"]\n else:\n return write_preds_arg.split(\",\")\n\n\ndef evaluate(\n model, tasks: Sequence[tasks_module.Task], batch_size: int, cuda_device, split=\"val\"\n) -> Tuple[Dict, pd.DataFrame]:\n \"\"\"Evaluate on a dataset\n {par,qst,ans}_idx are used for MultiRC and other question answering datasets\"\"\"\n FIELDS_TO_EXPORT = [\n \"idx\",\n \"sent1_str\",\n \"sent2_str\",\n \"labels\",\n \"pair_id\",\n \"psg_idx\",\n \"qst_idx\",\n \"ans_idx\",\n \"ans_str\",\n ]\n # Enforce that these tasks have the 'idx' field set.\n IDX_REQUIRED_TASK_NAMES = (\n tasks_module.ALL_GLUE_TASKS\n + tasks_module.ALL_SUPERGLUE_TASKS\n + tasks_module.ALL_COLA_NPI_TASKS\n )\n model.eval()\n iterator = BasicIterator(batch_size)\n\n all_metrics = {\"micro_avg\": 0.0, \"macro_avg\": 0.0}\n all_preds = {}\n n_examples_overall = 0 # n examples over all tasks\n assert len(tasks) > 0, \"Configured to evaluate, but specified no task to evaluate.\"\n\n for task in tasks:\n log.info(\"Evaluating on: %s, split: %s\", task.name, split)\n last_log = time.time()\n n_task_examples = 0\n task_preds = [] # accumulate DataFrames\n assert split in [\"train\", \"val\", \"test\"]\n generator = iterator(task.get_instance_iterable(split), num_epochs=1, shuffle=False)\n for batch_idx, batch in enumerate(generator):\n with torch.no_grad():\n if isinstance(cuda_device, int):\n batch = move_to_device(batch, cuda_device)\n out = model.forward(task=task, batch=batch, predict=True)\n if task is not None:\n task.update_metrics(out, batch)\n\n n_exs = get_output_attribute(out, \"n_exs\", cuda_device)\n # in multi-GPU mode n_exs is expected to be a tensor, w/ single-GPU an int is expected:\n if isinstance(n_exs, torch.Tensor):\n n_task_examples += n_exs.item()\n elif isinstance(n_exs, int):\n n_task_examples += n_exs\n else:\n raise ValueError(\"n_exs is type \" + str(type(n_exs)) + \", int or Tensor is expected.\")\n\n # get predictions\n if \"preds\" not in out:\n continue\n out[\"preds\"] = task.handle_preds(out[\"preds\"], batch)\n cols = _format_preds(out[\"preds\"])\n if task.name in IDX_REQUIRED_TASK_NAMES:\n assert 
\"idx\" in batch, f\"'idx' field missing from batches for task {task.name}!\"\n for field in FIELDS_TO_EXPORT:\n if field in batch:\n cols[field] = _coerce_list(batch[field])\n\n # Transpose data using Pandas\n df = pd.DataFrame(cols)\n task_preds.append(df)\n\n if time.time() - last_log > LOG_INTERVAL:\n log.info(\"\\tTask %s: batch %d\", task.name, batch_idx)\n last_log = time.time()\n # task_preds will be a DataFrame with columns\n # ['preds'] + FIELDS_TO_EXPORT\n # for GLUE tasks, preds entries should be single scalars.\n # Update metrics\n task_metrics = task.get_metrics(reset=True)\n for name, value in task_metrics.items():\n all_metrics[\"%s_%s\" % (task.name, name)] = value\n\n # We don't want diagnostic tasks to affect the micro and macro average.\n # Accuracy on diagnostic tasks is hardcoded to 0 except for winogender-diagnostic.\n if task.contributes_to_aggregate_score:\n all_metrics[\"micro_avg\"] += all_metrics[task.val_metric] * n_task_examples\n all_metrics[\"macro_avg\"] += all_metrics[task.val_metric]\n n_examples_overall += n_task_examples\n\n if not task_preds:\n log.warning(\"Task %s: has no predictions!\", task.name)\n continue\n\n # Combine task_preds from each batch to a single DataFrame.\n task_preds = pd.concat(task_preds, ignore_index=True)\n\n # Store predictions, sorting by index if given.\n if \"idx\" in task_preds.columns:\n log.info(\"Task '%s': sorting predictions by 'idx'\", task.name)\n task_preds.sort_values(by=[\"idx\"], inplace=True)\n all_preds[task.name] = task_preds\n log.info(\"Finished evaluating on: %s\", task.name)\n\n # hack for diagnostics\n all_metrics[\"micro_avg\"] /= max(n_examples_overall, 1)\n all_metrics[\"macro_avg\"] /= len(tasks)\n\n return all_metrics, all_preds\n\n\ndef write_preds(\n tasks: Iterable[tasks_module.Task], all_preds, pred_dir, split_name, strict_glue_format=False\n) -> None:\n for task in tasks:\n if task.name not in all_preds:\n log.warning(\"Task '%s': missing predictions for split '%s'\", task.name, split_name)\n continue\n\n preds_df = all_preds[task.name]\n # Tasks that use _write_glue_preds:\n glue_style_tasks = (\n tasks_module.ALL_NLI_PROBING_TASKS\n + tasks_module.ALL_GLUE_TASKS\n + [\"wmt\"]\n + tasks_module.ALL_COLA_NPI_TASKS\n )\n\n if task.name in glue_style_tasks:\n # Strict mode: strict GLUE format (no extra cols)\n strict = strict_glue_format and task.name in tasks_module.ALL_GLUE_TASKS\n _write_glue_preds(task.name, preds_df, pred_dir, split_name, strict_glue_format=strict)\n elif isinstance(task, EdgeProbingTask):\n # Edge probing tasks have structured output.\n _write_edge_preds(task, preds_df, pred_dir, split_name)\n elif isinstance(task, BooleanQuestionTask):\n _write_boolq_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, CommitmentTask):\n _write_commitment_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, COPATask):\n _write_copa_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, MultiRCTask):\n _write_multirc_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, RTESuperGLUETask):\n _write_rte_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, ReCoRDTask):\n _write_record_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, 
WiCTask):\n _write_wic_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, WinogradCoreferenceTask):\n _write_winograd_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, GLUEDiagnosticTask):\n # glue-diagnostic is caught above by being in ALL_GLUE_TASKS\n # currently this only catches superglue-diagnostic\n _write_diagnostics_preds(\n task, preds_df, pred_dir, split_name, strict_glue_format=strict_glue_format\n )\n elif isinstance(task, QASRLTask):\n _write_simple_tsv_preds(task, preds_df, pred_dir, split_name)\n else:\n log.warning(\"Task '%s' not supported by write_preds().\", task.name)\n continue\n log.info(\"Task '%s': Wrote predictions to %s\", task.name, pred_dir)\n log.info(\"Wrote all preds for split '%s' to %s\", split_name, pred_dir)\n return\n\n\n# Exact file names per task required by the GLUE evaluation server\nGLUE_NAME_MAP = {\n \"cola\": \"CoLA\",\n \"glue-diagnostic\": \"AX\",\n \"mnli-mm\": \"MNLI-mm\",\n \"mnli-m\": \"MNLI-m\",\n \"mrpc\": \"MRPC\",\n \"qnli\": \"QNLI\",\n \"qqp\": \"QQP\",\n \"rte\": \"RTE\",\n \"sst\": \"SST-2\",\n \"sts-b\": \"STS-B\",\n \"wnli\": \"WNLI\",\n}\n\n# Exact file names per task required by the SuperGLUE evaluation server\nSUPERGLUE_NAME_MAP = {\n \"boolq\": \"BoolQ\",\n \"commitbank\": \"CB\",\n \"copa\": \"COPA\",\n \"multirc\": \"MultiRC\",\n \"record\": \"ReCoRD\",\n \"rte-superglue\": \"RTE\",\n \"wic\": \"WiC\",\n \"winograd-coreference\": \"WSC\",\n \"broadcoverage-diagnostic\": \"AX-b\",\n \"winogender-diagnostic\": \"AX-g\",\n}\n\n\ndef _get_pred_filename(task_name, pred_dir, split_name, strict_glue_format):\n if strict_glue_format and task_name in GLUE_NAME_MAP:\n if split_name == \"test\":\n file = \"%s.tsv\" % (GLUE_NAME_MAP[task_name])\n else:\n file = \"%s_%s.tsv\" % (GLUE_NAME_MAP[task_name], split_name)\n elif strict_glue_format and task_name in SUPERGLUE_NAME_MAP:\n if split_name == \"test\":\n file = \"%s.jsonl\" % (SUPERGLUE_NAME_MAP[task_name])\n else:\n file = \"%s_%s.jsonl\" % (SUPERGLUE_NAME_MAP[task_name], split_name)\n else:\n file = \"%s_%s.tsv\" % (task_name, split_name)\n return os.path.join(pred_dir, file)\n\n\ndef _write_edge_preds(\n task: EdgeProbingTask,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n join_with_input: bool = True,\n):\n \"\"\" Write predictions for edge probing task.\n\n This reads the task data and joins with predictions,\n taking the 'idx' field to represent the line number in the (preprocessed)\n task data file.\n\n Predictions are saved as JSON with one record per line.\n \"\"\"\n preds_file = os.path.join(pred_dir, f\"{task.name}_{split_name}.json\")\n # Each row of 'preds' is a NumPy object, need to convert to list for\n # serialization.\n preds_df = preds_df.copy()\n preds_df[\"preds\"] = [a.tolist() for a in preds_df[\"preds\"]]\n if join_with_input:\n preds_df.set_index([\"idx\"], inplace=True)\n # Load input data and join by row index.\n log.info(\"Task '%s': joining predictions with input split '%s'\", task.name, split_name)\n records = task.get_split_text(split_name)\n # TODO: update this with more prediction types, when available.\n records = (\n task.merge_preds(r, {\"proba\": preds_df.at[i, \"preds\"]}) for i, r in enumerate(records)\n )\n else:\n records = (row.to_dict() for _, row in preds_df.iterrows())\n\n with open(preds_file, \"w\") as fd:\n for record in records:\n fd.write(json.dumps(record))\n fd.write(\"\\n\")\n\n\ndef 
_write_wic_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for WiC task. \"\"\"\n pred_map = {0: \"false\", 1: \"true\"}\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n for row_idx, row in preds_df.iterrows():\n if strict_glue_format:\n out_d = {\"idx\": row[\"idx\"], \"label\": pred_map[row[\"preds\"]]}\n else:\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_winograd_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for Winograd Coreference task. \"\"\"\n pred_map = {0: \"False\", 1: \"True\"}\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n for row_idx, row in preds_df.iterrows():\n if strict_glue_format:\n out_d = {\"idx\": int(row[\"idx\"]), \"label\": pred_map[row[\"preds\"]]}\n else:\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_boolq_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for Boolean Questions task. \"\"\"\n pred_map = {0: \"false\", 1: \"true\"}\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n for row_idx, row in preds_df.iterrows():\n if strict_glue_format:\n out_d = {\"idx\": int(row[\"idx\"]), \"label\": pred_map[row[\"preds\"]]}\n else:\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_commitment_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for CommitmentBank task. \"\"\"\n pred_map = {0: \"neutral\", 1: \"entailment\", 2: \"contradiction\"}\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n for row_idx, row in preds_df.iterrows():\n if strict_glue_format:\n out_d = {\"idx\": row[\"idx\"], \"label\": pred_map[row[\"preds\"]]}\n else:\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_copa_preds(\n task, preds_df: pd.DataFrame, pred_dir: str, split_name: str, strict_glue_format: bool = False\n):\n \"\"\" Write COPA predictions to JSONL \"\"\"\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n for row_idx, row in preds_df.iterrows():\n if strict_glue_format:\n out_d = {\"idx\": int(row[\"idx\"]), \"label\": int(row[\"preds\"])}\n else:\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_multirc_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for MultiRC task. 
\"\"\"\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n if strict_glue_format:\n par_qst_ans_d = defaultdict(lambda: defaultdict(list))\n for row_idx, row in preds_df.iterrows():\n ans_d = {\"idx\": int(row[\"ans_idx\"]), \"label\": int(row[\"preds\"])}\n par_qst_ans_d[int(row[\"psg_idx\"])][int(row[\"qst_idx\"])].append(ans_d)\n for par_idx, qst_ans_d in par_qst_ans_d.items():\n qst_ds = []\n for qst_idx, answers in qst_ans_d.items():\n qst_d = {\"idx\": qst_idx, \"answers\": answers}\n qst_ds.append(qst_d)\n out_d = {\"idx\": par_idx, \"passage\": {\"questions\": qst_ds}}\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n else:\n for row_idx, row in preds_df.iterrows():\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_record_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for ReCoRD task. \"\"\"\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n if strict_glue_format:\n par_qst_ans_d = defaultdict(lambda: defaultdict(list))\n for row_idx, row in preds_df.iterrows():\n ans_d = {\n \"idx\": int(row[\"ans_idx\"]),\n \"str\": row[\"ans_str\"],\n \"logit\": torch.FloatTensor(row[\"preds\"]),\n }\n par_qst_ans_d[row[\"psg_idx\"]][row[\"qst_idx\"]].append(ans_d)\n for par_idx, qst_ans_d in par_qst_ans_d.items():\n for qst_idx, ans_ds in qst_ans_d.items():\n\n # get prediction\n logits_and_anss = [(d[\"logit\"], d[\"str\"]) for d in ans_ds]\n logits_and_anss.sort(key=lambda x: x[1])\n logits, anss = list(zip(*logits_and_anss))\n pred_idx = torch.softmax(torch.stack(logits), dim=-1)[:, -1].argmax().item()\n answer = anss[pred_idx]\n\n # write out answer\n qst_d = {\"idx\": qst_idx, \"label\": answer}\n preds_fh.write(\"{0}\\n\".format(json.dumps(qst_d)))\n else:\n for row_idx, row in preds_df.iterrows():\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_rte_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for RTE task in SuperGLUE prediction format. \"\"\"\n trg_map = {0: \"not_entailment\", 1: \"entailment\"}\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n for row_idx, row in preds_df.iterrows():\n if strict_glue_format:\n out_d = {\"idx\": row[\"idx\"], \"label\": trg_map[row[\"preds\"]]}\n else:\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_simple_tsv_preds(task, preds_df: pd.DataFrame, pred_dir: str, split_name: str):\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format=False)\n preds_df.to_csv(preds_file, sep=\"\\t\")\n\n\ndef _write_diagnostics_preds(\n task: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions for GLUE/SuperGLUE diagnostics task. 
\"\"\"\n\n if task.n_classes == 2:\n pred_map = {0: \"not_entailment\", 1: \"entailment\"}\n elif task.n_classes == 3:\n pred_map = {0: \"neutral\", 1: \"entailment\", 2: \"contradiction\"}\n else:\n raise ValueError(\"Invalid number of output classes detected\")\n\n preds_file = _get_pred_filename(task.name, pred_dir, split_name, strict_glue_format)\n with open(preds_file, \"w\", encoding=\"utf-8\") as preds_fh:\n for row_idx, row in preds_df.iterrows():\n if strict_glue_format:\n out_d = {\"idx\": row[\"idx\"], \"label\": pred_map[row[\"preds\"]]}\n else:\n out_d = row.to_dict()\n preds_fh.write(\"{0}\\n\".format(json.dumps(out_d)))\n\n\ndef _write_glue_preds(\n task_name: str,\n preds_df: pd.DataFrame,\n pred_dir: str,\n split_name: str,\n strict_glue_format: bool = False,\n):\n \"\"\" Write predictions to separate files located in pred_dir.\n We write special code to handle various GLUE tasks.\n\n Use strict_glue_format to guarantee compatibility with GLUE website.\n\n Args:\n task_name: task name\n preds_df: predictions DataFrame for a single task, as returned by\n evaluate().\n pred_dir: directory to write predictions\n split_name: name of this split ('train', 'val', or 'test')\n strict_glue_format: if true, writes format compatible with GLUE\n website.\n \"\"\"\n\n def _apply_pred_map(preds_df, pred_map, key=\"prediction\"):\n \"\"\" Apply preds_map, in-place. \"\"\"\n preds_df[key] = [pred_map[p] for p in preds_df[key]]\n\n def _write_preds_with_pd(preds_df: pd.DataFrame, pred_file: str, write_type=int):\n \"\"\" Write TSV file in GLUE format, using Pandas. \"\"\"\n\n required_cols = [\"index\", \"prediction\"]\n if strict_glue_format:\n cols_to_write = required_cols\n quoting = QUOTE_NONE\n log.info(\n \"Task '%s', split '%s': writing %s in \" \"strict GLUE format.\",\n task_name,\n split_name,\n pred_file,\n )\n else:\n all_cols = set(preds_df.columns)\n # make sure we write index and prediction as first columns,\n # then all the other ones we can find.\n cols_to_write = required_cols + sorted(list(all_cols.difference(required_cols)))\n quoting = QUOTE_MINIMAL\n preds_df.to_csv(\n pred_file,\n sep=\"\\t\",\n index=False,\n float_format=\"%.3f\",\n quoting=quoting,\n columns=cols_to_write,\n )\n\n if len(preds_df) == 0: # catch empty lists\n log.warning(\"Task '%s': predictions are empty!\", task_name)\n return\n\n def _add_default_column(df, name: str, val):\n \"\"\" Ensure column exists and missing values = val. 
\"\"\"\n if name not in df:\n df[name] = val\n df[name].fillna(value=val, inplace=True)\n\n preds_df = preds_df.copy()\n _add_default_column(preds_df, \"idx\", -1)\n _add_default_column(preds_df, \"sent1_str\", \"\")\n _add_default_column(preds_df, \"sent2_str\", \"\")\n _add_default_column(preds_df, \"labels\", -1)\n # Rename columns to match output headers.\n preds_df.rename(\n {\n \"idx\": \"index\",\n \"preds\": \"prediction\",\n \"sent1_str\": \"sentence_1\",\n \"sent2_str\": \"sentence_2\",\n \"labels\": \"true_label\",\n },\n axis=\"columns\",\n inplace=True,\n )\n\n if task_name == \"mnli\" and split_name == \"test\": # 9796 + 9847 = 19643\n assert len(preds_df) == 19643, \"Missing predictions for MNLI!\"\n log.info(\"There are %d examples in MNLI, 19643 were expected\", len(preds_df))\n # Sort back to original order to split matched and mismatched, which are\n # treated as a single dataset by jiant.\n preds_df.sort_index(inplace=True)\n pred_map = {0: \"neutral\", 1: \"entailment\", 2: \"contradiction\"}\n _apply_pred_map(preds_df, pred_map, \"prediction\")\n _write_preds_with_pd(\n preds_df.iloc[:9796],\n _get_pred_filename(\"mnli-m\", pred_dir, split_name, strict_glue_format),\n )\n _write_preds_with_pd(\n preds_df.iloc[9796:],\n _get_pred_filename(\"mnli-mm\", pred_dir, split_name, strict_glue_format),\n )\n elif task_name in [\"rte\", \"qnli\"]:\n pred_map = {0: \"not_entailment\", 1: \"entailment\"}\n _apply_pred_map(preds_df, pred_map, \"prediction\")\n _write_preds_with_pd(\n preds_df, _get_pred_filename(task_name, pred_dir, split_name, strict_glue_format)\n )\n elif task_name in [\"sts-b\"]:\n preds_df[\"prediction\"] = [min(max(0.0, pred * 5.0), 5.0) for pred in preds_df[\"prediction\"]]\n _write_preds_with_pd(\n preds_df,\n _get_pred_filename(task_name, pred_dir, split_name, strict_glue_format),\n write_type=float,\n )\n elif task_name in [\"wmt\"]:\n # convert each prediction to a single string if we find a list of\n # tokens\n if isinstance(preds_df[\"prediction\"][0], list):\n assert isinstance(preds_df[\"prediction\"][0][0], str)\n preds_df[\"prediction\"] = [\" \".join(pred) for pred in preds_df[\"prediction\"]]\n _write_preds_with_pd(\n preds_df,\n _get_pred_filename(task_name, pred_dir, split_name, strict_glue_format),\n write_type=str,\n )\n else:\n _write_preds_with_pd(\n preds_df,\n _get_pred_filename(task_name, pred_dir, split_name, strict_glue_format),\n write_type=int,\n )\n\n log.info(\"Wrote predictions for task: %s\", task_name)\n\n\ndef write_results(results, results_file, run_name):\n \"\"\" Aggregate results by appending results to results_file \"\"\"\n all_metrics_str = \", \".join([\"%s: %.3f\" % (metric, score) for metric, score in results.items()])\n with open(results_file, \"a\") as results_fh:\n results_fh.write(\"%s\\t%s\\n\" % (run_name, all_metrics_str))\n log.info(all_metrics_str)\n" ]
[ [ "torch.stack", "pandas.DataFrame", "torch.no_grad", "torch.FloatTensor", "pandas.concat" ] ]
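A small illustration of the parse_write_preds_arg helper from the entry above (a sketch; it assumes the function is importable from jiant.evaluate, as the file path suggests):

from jiant.evaluate import parse_write_preds_arg

assert parse_write_preds_arg(0) == []                        # predictions disabled
assert parse_write_preds_arg(1) == ["test"]                  # shorthand for the test split
assert parse_write_preds_arg("val,test") == ["val", "test"]  # comma-separated split names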
hengwei-chan/3D_SBDD
[ "eda6d51aaf01ef25581a46920a25161678fab76d" ]
[ "models/sample.py" ]
[ "import random\nimport torch\nimport numpy as np\nfrom torch.nn import functional as F\nfrom torch_geometric.data import Batch\nfrom tqdm.auto import tqdm\nfrom sklearn.cluster import DBSCAN, KMeans, OPTICS\n\nfrom .common import split_tensor_by_batch, concat_tensors_to_batch\n\n\nDEFAULT_FOLLOW_BATCH = ['protein_element', 'ligand_context_element',]\n\n\ndef uniform_ball_sample(num_points, r_min, r_max, device):\n phi = torch.rand(size=(num_points, 1), device=device) * (2*np.pi)\n costheta = torch.rand(size=(num_points, 1), device=device) * 2 - 1\n u = (r_max**3 - r_min**3) * torch.rand(size=(num_points, 1), device=device) + r_min**3\n\n theta = torch.arccos(costheta)\n r = u**(1/3)\n\n samples = torch.cat([\n r * torch.sin(theta) * torch.cos(phi),\n r * torch.sin(theta) * torch.sin(phi),\n r * torch.cos(theta),\n ], dim=1)\n return samples\n \n\ndef filter_too_close_points(x, y, r):\n \"\"\"\n Filter out points in `x` which are too close to some point in `y`\n Args:\n x: (N, 3)\n y: (M, 3)\n \"\"\"\n dist = torch.norm(x.unsqueeze(1) - y.unsqueeze(0), dim=-1) # (N, M)\n mask = (dist > r).all(dim=-1)\n return x[mask]\n\n\ndef sample_init(batch, model, num_points=1000, noise_std=2.0, follow_batch=DEFAULT_FOLLOW_BATCH):\n \"\"\"\n Sample `num_points` positions which are more likely to have atoms for each graph.\n \"\"\"\n\n device = batch.protein_pos.device\n data_list = batch.to_data_list()\n for i, data in enumerate(data_list):\n data._internal = i\n\n # Random starting points\n pos_query = []\n batch_query = []\n for i, data in enumerate(data_list):\n pos_query_modes = torch.cat([data.ligand_context_pos, data.protein_pos], dim=0) # (N_ctx+N_prot, 3)\n mode_idx = np.random.choice(np.arange(pos_query_modes.size(0)), size=[num_points, ])\n pos_query_modes = pos_query_modes[mode_idx]\n # Append to `pos_query` and `batch_query`\n pos_query.append(pos_query_modes + torch.randn_like(pos_query_modes) * noise_std)\n batch_query.append(torch.ones([pos_query_modes.size(0)], device=device).long() * i)\n pos_query = torch.cat(pos_query, dim=0)\n batch_query = torch.cat(batch_query, dim=0)\n\n pos_results = [None] * batch.num_graphs\n y_cls_results = [None] * batch.num_graphs\n y_ind_results = [None] * batch.num_graphs\n num_finished = 0\n # Start sampling points\n with torch.no_grad():\n while len(data_list) > 0:\n batch = Batch.from_data_list(data_list, follow_batch=follow_batch)\n # print('InternalID:', batch._internal, )\n y_cls, y_ind = model(\n pos_query = pos_query,\n protein_pos = batch.protein_pos,\n protein_atom_feature = batch.protein_atom_feature.float(),\n ligand_pos = batch.ligand_context_pos,\n ligand_atom_feature = batch.ligand_context_feature_full.float(), \n batch_query = batch_query,\n batch_protein = batch.protein_element_batch,\n batch_ligand = batch.ligand_context_element_batch,\n )\n\n has_atom = (y_cls.logsumexp(-1) > 0).cpu()\n batch_result = batch_query[has_atom]\n pos_result_list = split_tensor_by_batch(pos_query[has_atom], batch_result)\n y_cls_result_list = split_tensor_by_batch(y_cls[has_atom], batch_result)\n y_ind_result_list = split_tensor_by_batch(y_ind[has_atom], batch_result)\n\n pos_query_next = []\n data_list_next = []\n for i in range(len(pos_result_list)):\n if pos_result_list[i].size(0) >= num_points:\n idx = data_list[i]._internal\n pos_results[idx] = pos_result_list[i][:num_points]\n y_cls_results[idx] = y_cls_result_list[i][:num_points]\n y_ind_results[idx] = y_ind_result_list[i][:num_points]\n num_finished += 1\n # print('Finished: %d' % idx)\n else:\n 
pos_next = pos_result_list[i].repeat(2, 1)\n noise = torch.randn_like(pos_next) * noise_std\n noise[:pos_result_list[i].size(0)] = 0\n pos_next = pos_next + noise\n pos_query_next.append(pos_next[:num_points])\n data_list_next.append(data_list[i])\n \n data_list = data_list_next\n if len(data_list) > 0:\n pos_query, batch_query = concat_tensors_to_batch(pos_query_next)\n # print('Next PosQuery:', [p.size() for p in pos_query_next])\n # print('DataList Length:', len(data_list))\n # else:\n # print('Ending')\n\n batch._internal_id = None\n return pos_results, y_cls_results, y_ind_results\n\n\ndef sample_bonded(batch, model, mask=None, num_points_per_mode=100, d_min=0.9, d_max=1.5, follow_batch=DEFAULT_FOLLOW_BATCH):\n device = batch.protein_pos.device\n data_list = batch.to_data_list()\n \n if mask is not None:\n mask_list = split_tensor_by_batch(mask, batch.ligand_context_element_batch)\n\n pos_results = []\n y_results = []\n\n with torch.no_grad():\n for i, data in enumerate(data_list):\n pos_centroids = data.ligand_context_pos # (N_l, 3)\n\n if mask is not None:\n pos_centroids = pos_centroids[mask_list[i]]\n\n # Mask non-frontiers if needed\n assert pos_centroids.size(0) > 0\n noise = uniform_ball_sample(pos_centroids.size(0)*num_points_per_mode, d_min, d_max, device=device)\n pos_query = torch.repeat_interleave(pos_centroids, repeats=num_points_per_mode, dim=0) + noise # (N*n, 3)\n pos_query = filter_too_close_points(pos_query, pos_centroids, r=d_min)\n batch_query = torch.zeros([pos_query.size(0)], device=device, dtype=torch.long)\n\n # print(pos_query.size(), data.protein_pos.size(), data.ligand_context_pos.size())\n batch_protein = torch.zeros([data.protein_pos.size(0)], device=device, dtype=torch.long)\n batch_ligand = torch.zeros([data.ligand_context_pos.size(0)], device=device, dtype=torch.long)\n\n y, _ = model(\n pos_query = pos_query,\n protein_pos = data.protein_pos, # Note: not batch-wise\n protein_atom_feature = data.protein_atom_feature.float(),\n ligand_pos = data.ligand_context_pos,\n ligand_element = data.ligand_context_element,\n batch_query = batch_query,\n # batch_protein = data.protein_element_batch,\n # batch_ligand = data.ligand_context_element_batch,\n batch_protein = batch_protein,\n batch_ligand = batch_ligand,\n )\n\n has_atom = (y.logsumexp(-1) > 0).cpu()\n pos_results.append(pos_query[has_atom])\n y_results.append(y[has_atom])\n\n return pos_results, y_results\n\n\ndef sample_refine(batch, model, pos_results, num_iters=10, noise_std=0.1):\n \"\"\"\n Refine samples by discarding low-probability half each iteration.\n Args:\n pos_results: List of position matrices.\n \"\"\"\n pos_list = pos_results\n\n with torch.no_grad():\n for it in range(num_iters):\n pos_query, batch_query = concat_tensors_to_batch(pos_results)\n y_cls, y_ind = model(\n pos_query = pos_query,\n protein_pos = batch.protein_pos,\n protein_atom_feature = batch.protein_atom_feature.float(),\n ligand_pos = batch.ligand_context_pos,\n ligand_atom_feature = batch.ligand_context_feature_full.float(), \n batch_query = batch_query,\n batch_protein = batch.protein_element_batch,\n batch_ligand = batch.ligand_context_element_batch,\n )\n y_cls_results = split_tensor_by_batch(y_cls, batch_query)\n y_ind_results = split_tensor_by_batch(y_ind, batch_query)\n energy_list = split_tensor_by_batch(-y_cls.logsumexp(-1), batch_query)\n\n if it < num_iters - 1:\n pos_next = []\n for i in range(batch.num_graphs):\n energy = energy_list[i]\n top_idx = energy.argsort()[:energy.size(0) // 2]\n top_energy = 
energy[:energy.size(0) // 2]\n pos_top = pos_results[i][top_idx]\n pos_next.append(torch.cat([\n pos_top,\n pos_top + torch.randn_like(pos_top) * noise_std,\n ]))\n pos_results = pos_next\n \n return pos_results, y_cls_results, y_ind_results\n\n\ndef sample_refine_split(batch, model, pos_results, num_iters=10, noise_std=0.1, num_clusters=4):\n \"\"\"\n Refine samples by discarding low-probability half each iteration.\n Args:\n pos_results: List of position matrices.\n \"\"\"\n pos_list = pos_results\n\n with torch.no_grad():\n for it in range(num_iters):\n pos_query, batch_query = concat_tensors_to_batch(pos_results)\n y, _ = model(\n pos_query = pos_query,\n protein_pos = batch.protein_pos,\n protein_atom_feature = batch.protein_atom_feature.float(),\n ligand_pos = batch.ligand_context_pos,\n ligand_atom_feature = batch.ligand_context_feature_full.float(), \n batch_query = batch_query,\n batch_protein = batch.protein_element_batch,\n batch_ligand = batch.ligand_context_element_batch,\n )\n y_list = split_tensor_by_batch(y, batch_query)\n energy_list = split_tensor_by_batch(-y.logsumexp(-1), batch_query)\n\n pos_next = []\n y_results = []\n for i in range(batch.num_graphs):\n clusterer = KMeans(n_clusters=num_clusters)\n clusterer.fit(pos_results[i].clone().cpu().numpy())\n pos_top, y_top = [], []\n for j in range(clusterer.labels_.max() + 1):\n # Cluster mask\n cmask = (clusterer.labels_ == j)\n # Position\n pos_cluster = pos_results[i][cmask]\n energy = energy_list[i][cmask]\n top_idx = energy.argsort()[:energy.size(0) // 2]\n top_energy = energy[:energy.size(0) // 2]\n pos_top_cluster = pos_cluster[top_idx]\n pos_top.append(pos_top_cluster)\n # Y: atom type\n y_cluster = y_list[i][cmask]\n y_top.append(y_cluster[top_idx])\n\n pos_top = torch.cat(pos_top, dim=0)\n y_top = torch.cat(y_top, dim=0)\n\n if it < num_iters - 1:\n pos_next.append(torch.cat([\n pos_top,\n pos_top + torch.randn_like(pos_top) * noise_std,\n ]))\n else:\n pos_next.append(pos_top)\n y_results.append(y_top)\n pos_results = pos_next\n \n return pos_results, y_results\n\n\ndef cluster_and_select_best(pos_results, y_cls_results, y_ind_results, eps=0.2):\n \"\"\"\n Args:\n pos_results: List of position tensors.\n y_results: List of `y` tensors.\n \"\"\"\n num_graphs = len(pos_results)\n num_types = y_cls_results[0].size(1)\n pos_selected = []\n y_cls_selected = []\n y_ind_selected = []\n type_selected = []\n for i in range(num_graphs):\n clustering = DBSCAN(eps=eps, min_samples=1).fit(pos_results[i].clone().detach().cpu().numpy())\n num_clusters = clustering.labels_.max() + 1\n\n # print(pos_results[i].size(), y_results[i].size(), clustering.labels_.shape)\n\n pos_cluster = []\n y_cls_cluster = []\n y_ind_cluster = []\n type_cluster = []\n for clus_id in range(num_clusters):\n \n clus_pos = pos_results[i][clustering.labels_ == clus_id]\n clus_y_cls = y_cls_results[i][clustering.labels_ == clus_id]\n clus_y_ind = y_ind_results[i][clustering.labels_ == clus_id]\n type_id = clus_y_cls[clus_y_cls.argmax(0), range(num_types)].argmax().view(1)\n type_cluster.append(type_id)\n point_id = clus_y_cls.argmax(0)[type_id]\n pos_cluster.append(clus_pos[point_id].view(1, 3))\n y_cls_cluster.append(clus_y_cls[point_id].view(1, -1))\n y_ind_cluster.append(clus_y_ind[point_id].view(1, -1))\n\n if len(pos_cluster) > 0:\n pos_selected.append(torch.cat(pos_cluster, dim=0))\n y_cls_selected.append(torch.cat(y_cls_cluster, dim=0))\n y_ind_selected.append(torch.cat(y_ind_cluster, dim=0))\n type_selected.append(torch.cat(type_cluster, dim=0))\n 
else:\n pos_selected.append(None)\n y_cls_selected.append(None)\n y_ind_selected.append(None)\n type_selected.append(None)\n\n return pos_selected, y_cls_selected, y_ind_selected, type_selected\n\n\ndef sample_frontier_and_bond(batch, model):\n ligand_context_element_list = split_tensor_by_batch(\n batch.ligand_context_element,\n batch = batch.ligand_context_element_batch,\n )\n query_bond = []\n query_bond_list = []\n query_bond_batch = []\n cum_nodes = 0\n for i, element in enumerate(ligand_context_element_list):\n num_prev_nodes = element.size(0) - 1\n bond_index = torch.cat([\n torch.full([1, num_prev_nodes], num_prev_nodes, device=element.device, dtype=torch.long),\n torch.arange(num_prev_nodes, device=element.device).view(1, -1),\n ], dim=0)\n query_bond_list.append(bond_index)\n query_bond.append(bond_index + cum_nodes)\n query_bond_batch.append(torch.full([bond_index.size(1)], i))\n cum_nodes += element.size(0)\n\n query_bond = torch.cat(query_bond, dim=1)\n query_bond_batch = torch.cat(query_bond_batch, dim=0)\n\n y_frontier, y_bond = model(\n protein_pos = batch.protein_pos,\n protein_atom_feature = batch.protein_atom_feature.float(),\n ligand_pos = batch.ligand_context_pos,\n ligand_atom_feature = batch.ligand_context_feature_full.float(), \n query_bond_index = query_bond,\n batch_protein = batch.protein_element_batch,\n batch_ligand = batch.ligand_context_element_batch,\n )\n\n y_frontier = split_tensor_by_batch(y_frontier, batch.ligand_context_element_batch)\n\n y_bond = split_tensor_by_batch(y_bond, query_bond_batch, num_graphs=batch.num_graphs)\n\n return y_frontier, y_bond, query_bond_list\n\n\ndef add_ligand_atom_to_data(data, pos, atom_type, y_ind, type_map=[1,6,7,8,9,15,16,17]):\n \"\"\"\n \"\"\"\n data = data.clone()\n\n data.ligand_context_pos = torch.cat([\n data.ligand_context_pos, pos.view(1, 3).to(data.ligand_context_pos)\n ], dim=0)\n\n data.ligand_context_feature_full = torch.cat([\n data.ligand_context_feature_full,\n torch.cat([\n F.one_hot(atom_type.view(1), len(type_map)).to(data.ligand_context_feature_full), # (1, num_elements)\n # y_ind.sigmoid().bernoulli().to(data.ligand_context_feature_full).view(1, -1), # (n, num_indicators)\n (y_ind >= 0).to(data.ligand_context_feature_full).view(1, -1), # (n, num_indicators)\n ], dim=1)\n ], dim=0)\n\n element = torch.LongTensor([type_map[atom_type.item()]])\n data.ligand_context_element = torch.cat([\n data.ligand_context_element, element.view(1).to(data.ligand_context_element)\n ])\n\n return data\n \n\ndef enum_conbination(options, limit=3):\n if limit == 0:\n return\n for i in range(len(options)):\n yield [options[i], ]\n for item_j in enum_conbination(options[i+1:], limit-1):\n yield [options[i], ] + item_j\n\n\ndef get_next_step_comb(batch, pos_selected, y_selected, type_selected, type_map=[1,6,7,8,9,15,16,17], follow_batch=DEFAULT_FOLLOW_BATCH, num_data_limit=20, max_next_atoms=1, dist_thres=0.9):\n \"\"\"\n \"\"\"\n data_list = batch.to_data_list()\n results = []\n for i in range(len(data_list)):\n pos_next, y_next, type_next = pos_selected[i], y_selected[i], type_selected[i]\n if pos_next is None:\n results.append(data_list[i])\n continue\n # print(pos_next, y_next)\n\n for pos_comb in enum_conbination(list(range(pos_next.size(0))), max_next_atoms):\n\n if len(pos_comb) > 1:\n pdist = torch.norm(pos_next[pos_comb].view(1, -1, 3) - pos_next[pos_comb].view(-1, 1, 3), dim=-1, p=2)\n row, col = torch.triu_indices(pdist.size(0), pdist.size(1), offset=1)\n if pdist[row, col].min() < dist_thres:\n continue \n\n 
data_new = data_list[i]\n for j in pos_comb:\n data_new = add_ligand_atom_to_data(\n data_new,\n pos = pos_next[j],\n atom_type = type_next[j],\n type_map = type_map\n )\n\n energy_next = -1 * y_next[j][type_next[j]].item()\n if 'total_energy' not in data_new:\n data_new.total_energy = energy_next\n else:\n data_new.total_energy += energy_next\n data_new.average_energy = data_new.total_energy / data_new.ligand_context_pos.size(0)\n results.append(data_new)\n\n # results.sort(key=lambda data: data.average_energy)\n random.shuffle(results)\n results = results[:num_data_limit]\n return results\n\n\ndef get_next_step(batch, pos_selected, y_cls_selected, y_ind_selected, type_selected, type_map=[1,6,7,8,9,15,16,17], follow_batch=DEFAULT_FOLLOW_BATCH, num_data_limit=20):\n \"\"\"\n \"\"\"\n data_list = batch.to_data_list()\n results = []\n for i in range(len(data_list)):\n pos_next, y_cls_next, y_ind_next, type_next = pos_selected[i], y_cls_selected[i], y_ind_selected[i], type_selected[i]\n if pos_next is None:\n results.append(data_list[i])\n continue\n\n for j in range(len(pos_next)):\n data_new = add_ligand_atom_to_data(\n data_list[i],\n pos = pos_next[j],\n atom_type = type_next[j],\n y_ind = y_ind_next[j],\n type_map = type_map\n )\n\n logp = y_cls_next[j][type_next[j]].item() \n if 'logp_history' not in data_new:\n data_new.logp_history = [logp]\n else:\n data_new.logp_history.append(logp)\n data_new.total_logp = np.sum(data_new.logp_history)\n data_new.average_logp = np.mean(data_new.logp_history)\n results.append(data_new)\n\n # results.sort(key=lambda data: -1 * data.average_logp)\n random.shuffle(results)\n results = results[:num_data_limit]\n return results\n" ]
[ [ "torch.rand", "torch.cat", "torch.cos", "torch.arccos", "torch.sin", "torch.arange", "numpy.sum", "torch.no_grad", "sklearn.cluster.KMeans", "torch.repeat_interleave", "numpy.mean", "torch.randn_like", "sklearn.cluster.DBSCAN", "torch.full" ] ]
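A quick property check for uniform_ball_sample from the entry above (a sketch; assumes the function is importable from models/sample.py and torch is installed): every sampled point should fall in the spherical shell between r_min and r_max.

from models.sample import uniform_ball_sample

pts = uniform_ball_sample(1000, r_min=0.9, r_max=1.5, device='cpu')
radii = pts.norm(dim=1)           # distance of each point from the origin
assert pts.shape == (1000, 3)
assert radii.min() >= 0.9 - 1e-5  # nothing closer than r_min
assert radii.max() <= 1.5 + 1e-5  # nothing farther than r_max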
iamDecode/scipy
[ "ed587fb097eeceb1cc8c804a659cc18347b28f22" ]
[ "scipy/optimize/_minimize.py" ]
[ "\"\"\"\nUnified interfaces to minimization algorithms.\n\nFunctions\n---------\n- minimize : minimization of a function of several variables.\n- minimize_scalar : minimization of a function of one variable.\n\"\"\"\n\n__all__ = ['minimize', 'minimize_scalar']\n\n\nfrom warnings import warn\n\nimport numpy as np\n\n\n# unconstrained minimization\nfrom ._optimize import (_minimize_neldermead, _minimize_powell, _minimize_cg,\n _minimize_bfgs, _minimize_newtoncg,\n _minimize_scalar_brent, _minimize_scalar_bounded,\n _minimize_scalar_golden, MemoizeJac)\nfrom ._trustregion_dogleg import _minimize_dogleg\nfrom ._trustregion_ncg import _minimize_trust_ncg\nfrom ._trustregion_krylov import _minimize_trust_krylov\nfrom ._trustregion_exact import _minimize_trustregion_exact\nfrom ._trustregion_constr import _minimize_trustregion_constr\n\n# constrained minimization\nfrom ._lbfgsb_py import _minimize_lbfgsb\nfrom ._tnc import _minimize_tnc\nfrom ._cobyla_py import _minimize_cobyla\nfrom ._slsqp_py import _minimize_slsqp\nfrom ._constraints import (old_bound_to_new, new_bounds_to_old,\n old_constraint_to_new, new_constraint_to_old,\n NonlinearConstraint, LinearConstraint, Bounds)\nfrom ._differentiable_functions import FD_METHODS\n\nMINIMIZE_METHODS = ['nelder-mead', 'powell', 'cg', 'bfgs', 'newton-cg',\n 'l-bfgs-b', 'tnc', 'cobyla', 'slsqp', 'trust-constr',\n 'dogleg', 'trust-ncg', 'trust-exact', 'trust-krylov']\n\nMINIMIZE_SCALAR_METHODS = ['brent', 'bounded', 'golden']\n\ndef minimize(fun, x0, args=(), method=None, jac=None, hess=None,\n hessp=None, bounds=None, constraints=(), tol=None,\n callback=None, options=None):\n \"\"\"Minimization of scalar function of one or more variables.\n\n Parameters\n ----------\n fun : callable\n The objective function to be minimized.\n\n ``fun(x, *args) -> float``\n\n where ``x`` is a 1-D array with shape (n,) and ``args``\n is a tuple of the fixed parameters needed to completely\n specify the function.\n x0 : ndarray, shape (n,)\n Initial guess. Array of real elements of size (n,),\n where ``n`` is the number of independent variables.\n args : tuple, optional\n Extra arguments passed to the objective function and its\n derivatives (`fun`, `jac` and `hess` functions).\n method : str or callable, optional\n Type of solver. Should be one of\n\n - 'Nelder-Mead' :ref:`(see here) <optimize.minimize-neldermead>`\n - 'Powell' :ref:`(see here) <optimize.minimize-powell>`\n - 'CG' :ref:`(see here) <optimize.minimize-cg>`\n - 'BFGS' :ref:`(see here) <optimize.minimize-bfgs>`\n - 'Newton-CG' :ref:`(see here) <optimize.minimize-newtoncg>`\n - 'L-BFGS-B' :ref:`(see here) <optimize.minimize-lbfgsb>`\n - 'TNC' :ref:`(see here) <optimize.minimize-tnc>`\n - 'COBYLA' :ref:`(see here) <optimize.minimize-cobyla>`\n - 'SLSQP' :ref:`(see here) <optimize.minimize-slsqp>`\n - 'trust-constr' :ref:`(see here) <optimize.minimize-trustconstr>`\n - 'dogleg' :ref:`(see here) <optimize.minimize-dogleg>`\n - 'trust-ncg' :ref:`(see here) <optimize.minimize-trustncg>`\n - 'trust-exact' :ref:`(see here) <optimize.minimize-trustexact>`\n - 'trust-krylov' :ref:`(see here) <optimize.minimize-trustkrylov>`\n - custom - a callable object (added in version 0.14.0),\n see below for description.\n\n If not given, chosen to be one of ``BFGS``, ``L-BFGS-B``, ``SLSQP``,\n depending on whether or not the problem has constraints or bounds.\n jac : {callable, '2-point', '3-point', 'cs', bool}, optional\n Method for computing the gradient vector. 
Only for CG, BFGS,\n Newton-CG, L-BFGS-B, TNC, SLSQP, dogleg, trust-ncg, trust-krylov,\n trust-exact and trust-constr.\n If it is a callable, it should be a function that returns the gradient\n vector:\n\n ``jac(x, *args) -> array_like, shape (n,)``\n\n where ``x`` is an array with shape (n,) and ``args`` is a tuple with\n the fixed parameters. If `jac` is a Boolean and is True, `fun` is\n assumed to return a tuple ``(f, g)`` containing the objective\n function and the gradient.\n Methods 'Newton-CG', 'trust-ncg', 'dogleg', 'trust-exact', and\n 'trust-krylov' require that either a callable be supplied, or that\n `fun` return the objective and gradient.\n If None or False, the gradient will be estimated using 2-point finite\n difference estimation with an absolute step size.\n Alternatively, the keywords {'2-point', '3-point', 'cs'} can be used\n to select a finite difference scheme for numerical estimation of the\n gradient with a relative step size. These finite difference schemes\n obey any specified `bounds`.\n hess : {callable, '2-point', '3-point', 'cs', HessianUpdateStrategy}, optional\n Method for computing the Hessian matrix. Only for Newton-CG, dogleg,\n trust-ncg, trust-krylov, trust-exact and trust-constr. If it is\n callable, it should return the Hessian matrix:\n\n ``hess(x, *args) -> {LinearOperator, spmatrix, array}, (n, n)``\n\n where ``x`` is a (n,) ndarray and ``args`` is a tuple with the fixed\n parameters. LinearOperator and sparse matrix returns are only allowed\n for 'trust-constr' method. Alternatively (not available for Newton-CG\n or dogleg), the keywords {'2-point', '3-point', 'cs'} select a finite\n difference scheme for numerical estimation. Or, objects implementing\n the `HessianUpdateStrategy` interface can be used to approximate the\n Hessian. Available quasi-Newton methods implementing this interface\n are:\n\n - `BFGS`;\n - `SR1`.\n\n Whenever the gradient is estimated via finite-differences,\n the Hessian cannot be estimated with options\n {'2-point', '3-point', 'cs'} and needs to be\n estimated using one of the quasi-Newton strategies.\n 'trust-exact' cannot use a finite-difference scheme, and must be used\n with a callable returning an (n, n) array.\n hessp : callable, optional\n Hessian of objective function times an arbitrary vector p. Only for\n Newton-CG, trust-ncg, trust-krylov, trust-constr.\n Only one of `hessp` or `hess` needs to be given. If `hess` is\n provided, then `hessp` will be ignored. `hessp` must compute the\n Hessian times an arbitrary vector:\n\n ``hessp(x, p, *args) -> ndarray shape (n,)``\n\n where ``x`` is a (n,) ndarray, ``p`` is an arbitrary vector with\n dimension (n,) and ``args`` is a tuple with the fixed\n parameters.\n bounds : sequence or `Bounds`, optional\n Bounds on variables for Nelder-Mead, L-BFGS-B, TNC, SLSQP, Powell, and\n trust-constr methods. There are two ways to specify the bounds:\n\n 1. Instance of `Bounds` class.\n 2. Sequence of ``(min, max)`` pairs for each element in `x`. None\n is used to specify no bound.\n\n constraints : {Constraint, dict} or List of {Constraint, dict}, optional\n Constraints definition. 
Only for COBYLA, SLSQP and trust-constr.\n\n Constraints for 'trust-constr' are defined as a single object or a\n list of objects specifying constraints to the optimization problem.\n Available constraints are:\n\n - `LinearConstraint`\n - `NonlinearConstraint`\n\n Constraints for COBYLA, SLSQP are defined as a list of dictionaries.\n Each dictionary with fields:\n\n type : str\n Constraint type: 'eq' for equality, 'ineq' for inequality.\n fun : callable\n The function defining the constraint.\n jac : callable, optional\n The Jacobian of `fun` (only for SLSQP).\n args : sequence, optional\n Extra arguments to be passed to the function and Jacobian.\n\n Equality constraint means that the constraint function result is to\n be zero whereas inequality means that it is to be non-negative.\n Note that COBYLA only supports inequality constraints.\n tol : float, optional\n Tolerance for termination. When `tol` is specified, the selected\n minimization algorithm sets some relevant solver-specific tolerance(s)\n equal to `tol`. For detailed control, use solver-specific\n options.\n options : dict, optional\n A dictionary of solver options. All methods accept the following\n generic options:\n\n maxiter : int\n Maximum number of iterations to perform. Depending on the\n method each iteration may use several function evaluations.\n disp : bool\n Set to True to print convergence messages.\n\n For method-specific options, see :func:`show_options()`.\n callback : callable, optional\n Called after each iteration. For 'trust-constr' it is a callable with\n the signature:\n\n ``callback(xk, OptimizeResult state) -> bool``\n\n where ``xk`` is the current parameter vector and ``state``\n is an `OptimizeResult` object, with the same fields\n as the ones from the return. If callback returns True\n the algorithm execution is terminated.\n For all the other methods, the signature is:\n\n ``callback(xk)``\n\n where ``xk`` is the current parameter vector.\n\n Returns\n -------\n res : OptimizeResult\n The optimization result represented as an ``OptimizeResult`` object.\n Important attributes are: ``x`` the solution array, ``success`` a\n Boolean flag indicating if the optimizer exited successfully and\n ``message`` which describes the cause of the termination. See\n `OptimizeResult` for a description of other attributes.\n\n See also\n --------\n minimize_scalar : Interface to minimization algorithms for scalar\n univariate functions\n show_options : Additional options accepted by the solvers\n\n Notes\n -----\n This section describes the available solvers that can be selected by the\n 'method' parameter. The default method is *BFGS*.\n\n **Unconstrained minimization**\n\n Method :ref:`CG <optimize.minimize-cg>` uses a nonlinear conjugate\n gradient algorithm by Polak and Ribiere, a variant of the\n Fletcher-Reeves method described in [5]_ pp.120-122. Only the\n first derivatives are used.\n\n Method :ref:`BFGS <optimize.minimize-bfgs>` uses the quasi-Newton\n method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS) [5]_\n pp. 136. It uses the first derivatives only. BFGS has proven good\n performance even for non-smooth optimizations. This method also\n returns an approximation of the Hessian inverse, stored as\n `hess_inv` in the OptimizeResult object.\n\n Method :ref:`Newton-CG <optimize.minimize-newtoncg>` uses a\n Newton-CG algorithm [5]_ pp. 168 (also known as the truncated\n Newton method). It uses a CG method to compute the search\n direction. 
See also *TNC* method for a box-constrained\n minimization with a similar algorithm. Suitable for large-scale\n problems.\n\n Method :ref:`dogleg <optimize.minimize-dogleg>` uses the dog-leg\n trust-region algorithm [5]_ for unconstrained minimization. This\n algorithm requires the gradient and Hessian; furthermore the\n Hessian is required to be positive definite.\n\n Method :ref:`trust-ncg <optimize.minimize-trustncg>` uses the\n Newton conjugate gradient trust-region algorithm [5]_ for\n unconstrained minimization. This algorithm requires the gradient\n and either the Hessian or a function that computes the product of\n the Hessian with a given vector. Suitable for large-scale problems.\n\n Method :ref:`trust-krylov <optimize.minimize-trustkrylov>` uses\n the Newton GLTR trust-region algorithm [14]_, [15]_ for unconstrained\n minimization. This algorithm requires the gradient\n and either the Hessian or a function that computes the product of\n the Hessian with a given vector. Suitable for large-scale problems.\n On indefinite problems it usually requires fewer iterations than the\n `trust-ncg` method and is recommended for medium and large-scale problems.\n\n Method :ref:`trust-exact <optimize.minimize-trustexact>`\n is a trust-region method for unconstrained minimization in which\n quadratic subproblems are solved almost exactly [13]_. This\n algorithm requires the gradient and the Hessian (which is\n *not* required to be positive definite). It is, in many situations,\n the Newton method that converges in the fewest iterations, and it is\n the most recommended for small and medium-size problems.\n\n **Bound-Constrained minimization**\n\n Method :ref:`Nelder-Mead <optimize.minimize-neldermead>` uses the\n Simplex algorithm [1]_, [2]_. This algorithm is robust in many\n applications. However, if numerical computation of derivatives can be\n trusted, other algorithms using the first and/or second derivatives\n information might be preferred for their better performance in\n general.\n\n Method :ref:`L-BFGS-B <optimize.minimize-lbfgsb>` uses the L-BFGS-B\n algorithm [6]_, [7]_ for bound constrained minimization.\n\n Method :ref:`Powell <optimize.minimize-powell>` is a modification\n of Powell's method [3]_, [4]_ which is a conjugate direction\n method. It performs sequential one-dimensional minimizations along\n each vector of the directions set (`direc` field in `options` and\n `info`), which is updated at each iteration of the main\n minimization loop. The function need not be differentiable, and no\n derivatives are taken. If bounds are not provided, then an\n unbounded line search will be used. If bounds are provided and\n the initial guess is within the bounds, then every function\n evaluation throughout the minimization procedure will be within\n the bounds. If bounds are provided, the initial guess is outside\n the bounds, and `direc` is full rank (default has full rank), then\n some function evaluations during the first iteration may be\n outside the bounds, but every function evaluation after the first\n iteration will be within the bounds. If `direc` is not full rank,\n then some parameters may not be optimized and the solution is not\n guaranteed to be within the bounds.\n\n Method :ref:`TNC <optimize.minimize-tnc>` uses a truncated Newton\n algorithm [5]_, [8]_ to minimize a function with variables subject\n to bounds. This algorithm uses gradient information; it is also\n called Newton Conjugate-Gradient. 
It differs from the *Newton-CG*\n method described above as it wraps a C implementation and allows\n each variable to be given upper and lower bounds.\n\n **Constrained Minimization**\n\n Method :ref:`COBYLA <optimize.minimize-cobyla>` uses the\n Constrained Optimization BY Linear Approximation (COBYLA) method\n [9]_, [10]_, [11]_. The algorithm is based on linear\n approximations to the objective function and each constraint. The\n method wraps a FORTRAN implementation of the algorithm. The\n constraint functions 'fun' may return either a single number\n or an array or list of numbers.\n\n Method :ref:`SLSQP <optimize.minimize-slsqp>` uses Sequential\n Least SQuares Programming to minimize a function of several\n variables with any combination of bounds, equality and inequality\n constraints. The method wraps the SLSQP Optimization subroutine\n originally implemented by Dieter Kraft [12]_. Note that the\n wrapper handles infinite values in bounds by converting them into\n large floating values.\n\n Method :ref:`trust-constr <optimize.minimize-trustconstr>` is a\n trust-region algorithm for constrained optimization. It switches\n between two implementations depending on the problem definition.\n It is the most versatile constrained minimization algorithm\n implemented in SciPy and the most appropriate for large-scale problems.\n For equality constrained problems it is an implementation of Byrd-Omojokun\n Trust-Region SQP method described in [17]_ and in [5]_, p. 549. When\n inequality constraints are imposed as well, it switches to the trust-region\n interior point method described in [16]_. This interior point algorithm,\n in turn, solves inequality constraints by introducing slack variables\n and solving a sequence of equality-constrained barrier problems\n for progressively smaller values of the barrier parameter.\n The previously described equality constrained SQP method is\n used to solve the subproblems with increasing levels of accuracy\n as the iterate gets closer to a solution.\n\n **Finite-Difference Options**\n\n For Method :ref:`trust-constr <optimize.minimize-trustconstr>`\n the gradient and the Hessian may be approximated using\n three finite-difference schemes: {'2-point', '3-point', 'cs'}.\n The scheme 'cs' is, potentially, the most accurate but it\n requires the function to correctly handle complex inputs and to\n be differentiable in the complex plane. The scheme '3-point' is more\n accurate than '2-point' but requires twice as many operations.\n\n **Custom minimizers**\n\n It may be useful to pass a custom minimization method, for example\n when using a frontend to this method such as `scipy.optimize.basinhopping`\n or a different library. You can simply pass a callable as the ``method``\n parameter.\n\n The callable is called as ``method(fun, x0, args, **kwargs, **options)``\n where ``kwargs`` corresponds to any other parameters passed to `minimize`\n (such as `callback`, `hess`, etc.), except the `options` dict, which has\n its contents also passed as `method` parameters pair by pair. Also, if\n `jac` has been passed as a bool type, `jac` and `fun` are mangled so that\n `fun` returns just the function values and `jac` is converted to a function\n returning the Jacobian. The method shall return an `OptimizeResult`\n object.\n\n The provided `method` callable must be able to accept (and possibly ignore)\n arbitrary parameters; the set of parameters accepted by `minimize` may\n expand in future versions and then these parameters will be passed to\n the method. 
You can find an example in the scipy.optimize tutorial.\n\n .. versionadded:: 0.11.0\n\n References\n ----------\n .. [1] Nelder, J A, and R Mead. 1965. A Simplex Method for Function\n Minimization. The Computer Journal 7: 308-13.\n .. [2] Wright M H. 1996. Direct search methods: Once scorned, now\n respectable, in Numerical Analysis 1995: Proceedings of the 1995\n Dundee Biennial Conference in Numerical Analysis (Eds. D F\n Griffiths and G A Watson). Addison Wesley Longman, Harlow, UK.\n 191-208.\n .. [3] Powell, M J D. 1964. An efficient method for finding the minimum of\n a function of several variables without calculating derivatives. The\n Computer Journal 7: 155-162.\n .. [4] Press W, S A Teukolsky, W T Vetterling and B P Flannery.\n Numerical Recipes (any edition), Cambridge University Press.\n .. [5] Nocedal, J, and S J Wright. 2006. Numerical Optimization.\n Springer New York.\n .. [6] Byrd, R H and P Lu and J. Nocedal. 1995. A Limited Memory\n Algorithm for Bound Constrained Optimization. SIAM Journal on\n Scientific and Statistical Computing 16 (5): 1190-1208.\n .. [7] Zhu, C and R H Byrd and J Nocedal. 1997. L-BFGS-B: Algorithm\n 778: L-BFGS-B, FORTRAN routines for large scale bound constrained\n optimization. ACM Transactions on Mathematical Software 23 (4):\n 550-560.\n .. [8] Nash, S G. Newton-Type Minimization Via the Lanczos Method.\n 1984. SIAM Journal of Numerical Analysis 21: 770-778.\n .. [9] Powell, M J D. A direct search optimization method that models\n the objective and constraint functions by linear interpolation.\n 1994. Advances in Optimization and Numerical Analysis, eds. S. Gomez\n and J-P Hennart, Kluwer Academic (Dordrecht), 51-67.\n .. [10] Powell M J D. Direct search algorithms for optimization\n calculations. 1998. Acta Numerica 7: 287-336.\n .. [11] Powell M J D. A view of algorithms for optimization without\n derivatives. 2007.Cambridge University Technical Report DAMTP\n 2007/NA03\n .. [12] Kraft, D. A software package for sequential quadratic\n programming. 1988. Tech. Rep. DFVLR-FB 88-28, DLR German Aerospace\n Center -- Institute for Flight Mechanics, Koln, Germany.\n .. [13] Conn, A. R., Gould, N. I., and Toint, P. L.\n Trust region methods. 2000. Siam. pp. 169-200.\n .. [14] F. Lenders, C. Kirches, A. Potschka: \"trlib: A vector-free\n implementation of the GLTR method for iterative solution of\n the trust region problem\", :arxiv:`1611.04718`\n .. [15] N. Gould, S. Lucidi, M. Roma, P. Toint: \"Solving the\n Trust-Region Subproblem using the Lanczos Method\",\n SIAM J. Optim., 9(2), 504--525, (1999).\n .. [16] Byrd, Richard H., Mary E. Hribar, and Jorge Nocedal. 1999.\n An interior point algorithm for large-scale nonlinear programming.\n SIAM Journal on Optimization 9.4: 877-900.\n .. [17] Lalee, Marucha, Jorge Nocedal, and Todd Plantega. 1998. On the\n implementation of an algorithm for large-scale equality constrained\n optimization. SIAM Journal on Optimization 8.3: 682-706.\n\n Examples\n --------\n Let us consider the problem of minimizing the Rosenbrock function. This\n function (and its respective derivatives) is implemented in `rosen`\n (resp. 
`rosen_der`, `rosen_hess`) in the `scipy.optimize`.\n\n >>> from scipy.optimize import minimize, rosen, rosen_der\n\n A simple application of the *Nelder-Mead* method is:\n\n >>> x0 = [1.3, 0.7, 0.8, 1.9, 1.2]\n >>> res = minimize(rosen, x0, method='Nelder-Mead', tol=1e-6)\n >>> res.x\n array([ 1., 1., 1., 1., 1.])\n\n Now using the *BFGS* algorithm, using the first derivative and a few\n options:\n\n >>> res = minimize(rosen, x0, method='BFGS', jac=rosen_der,\n ... options={'gtol': 1e-6, 'disp': True})\n Optimization terminated successfully.\n Current function value: 0.000000\n Iterations: 26\n Function evaluations: 31\n Gradient evaluations: 31\n >>> res.x\n array([ 1., 1., 1., 1., 1.])\n >>> print(res.message)\n Optimization terminated successfully.\n >>> res.hess_inv\n array([[ 0.00749589, 0.01255155, 0.02396251, 0.04750988, 0.09495377], # may vary\n [ 0.01255155, 0.02510441, 0.04794055, 0.09502834, 0.18996269],\n [ 0.02396251, 0.04794055, 0.09631614, 0.19092151, 0.38165151],\n [ 0.04750988, 0.09502834, 0.19092151, 0.38341252, 0.7664427 ],\n [ 0.09495377, 0.18996269, 0.38165151, 0.7664427, 1.53713523]])\n\n\n Next, consider a minimization problem with several constraints (namely\n Example 16.4 from [5]_). The objective function is:\n\n >>> fun = lambda x: (x[0] - 1)**2 + (x[1] - 2.5)**2\n\n There are three constraints defined as:\n\n >>> cons = ({'type': 'ineq', 'fun': lambda x: x[0] - 2 * x[1] + 2},\n ... {'type': 'ineq', 'fun': lambda x: -x[0] - 2 * x[1] + 6},\n ... {'type': 'ineq', 'fun': lambda x: -x[0] + 2 * x[1] + 2})\n\n And variables must be positive, hence the following bounds:\n\n >>> bnds = ((0, None), (0, None))\n\n The optimization problem is solved using the SLSQP method as:\n\n >>> res = minimize(fun, (2, 0), method='SLSQP', bounds=bnds,\n ... constraints=cons)\n\n It should converge to the theoretical solution (1.4 ,1.7).\n\n \"\"\"\n x0 = np.asarray(x0)\n if x0.dtype.kind in np.typecodes[\"AllInteger\"]:\n x0 = np.asarray(x0, dtype=float)\n\n if not isinstance(args, tuple):\n args = (args,)\n\n if method is None:\n # Select automatically\n if constraints:\n method = 'SLSQP'\n elif bounds is not None:\n method = 'L-BFGS-B'\n else:\n method = 'BFGS'\n\n if callable(method):\n meth = \"_custom\"\n else:\n meth = method.lower()\n\n if options is None:\n options = {}\n # check if optional parameters are supported by the selected method\n # - jac\n if meth in ('nelder-mead', 'powell', 'cobyla') and bool(jac):\n warn('Method %s does not use gradient information (jac).' % method,\n RuntimeWarning)\n # - hess\n if meth not in ('newton-cg', 'dogleg', 'trust-ncg', 'trust-constr',\n 'trust-krylov', 'trust-exact', '_custom') and hess is not None:\n warn('Method %s does not use Hessian information (hess).' % method,\n RuntimeWarning)\n # - hessp\n if meth not in ('newton-cg', 'dogleg', 'trust-ncg', 'trust-constr',\n 'trust-krylov', '_custom') \\\n and hessp is not None:\n warn('Method %s does not use Hessian-vector product '\n 'information (hessp).' % method, RuntimeWarning)\n # - constraints or bounds\n if (meth in ('cg', 'bfgs', 'newton-cg', 'dogleg', 'trust-ncg')\n and (bounds is not None or np.any(constraints))):\n warn('Method %s cannot handle constraints nor bounds.' % method,\n RuntimeWarning)\n if meth in ('nelder-mead', 'l-bfgs-b', 'tnc', 'powell') and np.any(constraints):\n warn('Method %s cannot handle constraints.' % method,\n RuntimeWarning)\n if meth == 'cobyla' and bounds is not None:\n warn('Method %s cannot handle bounds.' 
% method,\n RuntimeWarning)\n # - callback\n if (meth in ('cobyla',) and callback is not None):\n warn('Method %s does not support callback.' % method, RuntimeWarning)\n # - return_all\n if (meth in ('l-bfgs-b', 'tnc', 'cobyla', 'slsqp') and\n options.get('return_all', False)):\n warn('Method %s does not support the return_all option.' % method,\n RuntimeWarning)\n\n # check gradient vector\n if callable(jac):\n pass\n elif jac is True:\n # fun returns func and grad\n fun = MemoizeJac(fun)\n jac = fun.derivative\n elif (jac in FD_METHODS and\n meth in ['trust-constr', 'bfgs', 'cg', 'l-bfgs-b', 'tnc', 'slsqp']):\n # finite differences with relative step\n pass\n elif meth in ['trust-constr']:\n # default jac calculation for this method\n jac = '2-point'\n elif jac is None or bool(jac) is False:\n # this will cause e.g. LBFGS to use forward difference, absolute step\n jac = None\n else:\n # default if jac option is not understood\n jac = None\n\n # set default tolerances\n if tol is not None:\n options = dict(options)\n if meth == 'nelder-mead':\n options.setdefault('xatol', tol)\n options.setdefault('fatol', tol)\n if meth in ('newton-cg', 'powell', 'tnc'):\n options.setdefault('xtol', tol)\n if meth in ('powell', 'l-bfgs-b', 'tnc', 'slsqp'):\n options.setdefault('ftol', tol)\n if meth in ('bfgs', 'cg', 'l-bfgs-b', 'tnc', 'dogleg',\n 'trust-ncg', 'trust-exact', 'trust-krylov'):\n options.setdefault('gtol', tol)\n if meth in ('cobyla', '_custom'):\n options.setdefault('tol', tol)\n if meth == 'trust-constr':\n options.setdefault('xtol', tol)\n options.setdefault('gtol', tol)\n options.setdefault('barrier_tol', tol)\n\n if meth == '_custom':\n # custom method called before bounds and constraints are 'standardised'\n # custom method should be able to accept whatever bounds/constraints\n # are provided to it.\n return method(fun, x0, args=args, jac=jac, hess=hess, hessp=hessp,\n bounds=bounds, constraints=constraints,\n callback=callback, **options)\n\n constraints = standardize_constraints(constraints, x0, meth)\n\n remove_vars = False\n if bounds is not None:\n if meth in {\"tnc\", \"slsqp\", \"l-bfgs-b\"}:\n # These methods can't take the finite-difference derivatives they\n # need when a variable is fixed by the bounds. To avoid this issue,\n # remove fixed variables from the problem.\n\n # convert to new-style bounds so we only have to consider one case\n bounds = standardize_bounds(bounds, x0, 'new')\n\n # determine whether any variables are fixed\n i_fixed = (bounds.lb == bounds.ub)\n\n # determine whether finite differences are needed for any grad/jac\n fd_needed = (not callable(jac))\n for con in constraints:\n if not callable(con.get('jac', None)):\n fd_needed = True\n\n # If finite differences are ever used, remove all fixed variables\n # Always remove fixed variables for TNC; see gh-14565\n remove_vars = i_fixed.any() and (fd_needed or meth == \"tnc\")\n if remove_vars:\n x_fixed = (bounds.lb)[i_fixed]\n x0 = x0[~i_fixed]\n bounds = _remove_from_bounds(bounds, i_fixed)\n fun = _remove_from_func(fun, i_fixed, x_fixed)\n if callable(callback):\n callback = _remove_from_func(callback, i_fixed, x_fixed)\n if callable(jac):\n jac = _remove_from_func(jac, i_fixed, x_fixed, remove=1)\n\n # make a copy of the constraints so the user's version doesn't\n # get changed. 
(Shallow copy is ok)\n constraints = [con.copy() for con in constraints]\n for con in constraints: # yes, guaranteed to be a list\n con['fun'] = _remove_from_func(con['fun'], i_fixed,\n x_fixed, min_dim=1,\n remove=0)\n if callable(con.get('jac', None)):\n con['jac'] = _remove_from_func(con['jac'], i_fixed,\n x_fixed, min_dim=2,\n remove=1)\n bounds = standardize_bounds(bounds, x0, meth)\n\n if meth == 'nelder-mead':\n res = _minimize_neldermead(fun, x0, args, callback, bounds=bounds,\n **options)\n elif meth == 'powell':\n res = _minimize_powell(fun, x0, args, callback, bounds, **options)\n elif meth == 'cg':\n res = _minimize_cg(fun, x0, args, jac, callback, **options)\n elif meth == 'bfgs':\n res = _minimize_bfgs(fun, x0, args, jac, callback, **options)\n elif meth == 'newton-cg':\n res = _minimize_newtoncg(fun, x0, args, jac, hess, hessp, callback,\n **options)\n elif meth == 'l-bfgs-b':\n res = _minimize_lbfgsb(fun, x0, args, jac, bounds,\n callback=callback, **options)\n elif meth == 'tnc':\n res = _minimize_tnc(fun, x0, args, jac, bounds, callback=callback,\n **options)\n elif meth == 'cobyla':\n res = _minimize_cobyla(fun, x0, args, constraints, **options)\n elif meth == 'slsqp':\n res = _minimize_slsqp(fun, x0, args, jac, bounds,\n constraints, callback=callback, **options)\n elif meth == 'trust-constr':\n res = _minimize_trustregion_constr(fun, x0, args, jac, hess, hessp,\n bounds, constraints,\n callback=callback, **options)\n elif meth == 'dogleg':\n res = _minimize_dogleg(fun, x0, args, jac, hess,\n callback=callback, **options)\n elif meth == 'trust-ncg':\n res = _minimize_trust_ncg(fun, x0, args, jac, hess, hessp,\n callback=callback, **options)\n elif meth == 'trust-krylov':\n res = _minimize_trust_krylov(fun, x0, args, jac, hess, hessp,\n callback=callback, **options)\n elif meth == 'trust-exact':\n res = _minimize_trustregion_exact(fun, x0, args, jac, hess,\n callback=callback, **options)\n else:\n raise ValueError('Unknown solver %s' % method)\n\n if remove_vars:\n res.x = _add_to_array(res.x, i_fixed, x_fixed)\n res.jac = _add_to_array(res.jac, i_fixed, np.nan)\n if \"hess_inv\" in res:\n res.hess_inv = None # unknown\n\n return res\n\n\ndef minimize_scalar(fun, bracket=None, bounds=None, args=(),\n method='brent', tol=None, options=None):\n \"\"\"Minimization of scalar function of one variable.\n\n Parameters\n ----------\n fun : callable\n Objective function.\n Scalar function, must return a scalar.\n bracket : sequence, optional\n For methods 'brent' and 'golden', `bracket` defines the bracketing\n interval and can either have three items ``(a, b, c)`` so that\n ``a < b < c`` and ``fun(b) < fun(a), fun(c)`` or two items ``a`` and\n ``c`` which are assumed to be a starting interval for a downhill\n bracket search (see `bracket`); it doesn't always mean that the\n obtained solution will satisfy ``a <= x <= c``.\n bounds : sequence, optional\n For method 'bounded', `bounds` is mandatory and must have two items\n corresponding to the optimization bounds.\n args : tuple, optional\n Extra arguments passed to the objective function.\n method : str or callable, optional\n Type of solver. Should be one of:\n\n - 'Brent' :ref:`(see here) <optimize.minimize_scalar-brent>`\n - 'Bounded' :ref:`(see here) <optimize.minimize_scalar-bounded>`\n - 'Golden' :ref:`(see here) <optimize.minimize_scalar-golden>`\n - custom - a callable object (added in version 0.14.0), see below\n\n tol : float, optional\n Tolerance for termination. 
For detailed control, use solver-specific\n options.\n options : dict, optional\n A dictionary of solver options.\n\n maxiter : int\n Maximum number of iterations to perform.\n disp : bool\n Set to True to print convergence messages.\n\n See :func:`show_options()` for solver-specific options.\n\n Returns\n -------\n res : OptimizeResult\n The optimization result represented as a ``OptimizeResult`` object.\n Important attributes are: ``x`` the solution array, ``success`` a\n Boolean flag indicating if the optimizer exited successfully and\n ``message`` which describes the cause of the termination. See\n `OptimizeResult` for a description of other attributes.\n\n See also\n --------\n minimize : Interface to minimization algorithms for scalar multivariate\n functions\n show_options : Additional options accepted by the solvers\n\n Notes\n -----\n This section describes the available solvers that can be selected by the\n 'method' parameter. The default method is *Brent*.\n\n Method :ref:`Brent <optimize.minimize_scalar-brent>` uses Brent's\n algorithm to find a local minimum. The algorithm uses inverse\n parabolic interpolation when possible to speed up convergence of\n the golden section method.\n\n Method :ref:`Golden <optimize.minimize_scalar-golden>` uses the\n golden section search technique. It uses analog of the bisection\n method to decrease the bracketed interval. It is usually\n preferable to use the *Brent* method.\n\n Method :ref:`Bounded <optimize.minimize_scalar-bounded>` can\n perform bounded minimization. It uses the Brent method to find a\n local minimum in the interval x1 < xopt < x2.\n\n **Custom minimizers**\n\n It may be useful to pass a custom minimization method, for example\n when using some library frontend to minimize_scalar. You can simply\n pass a callable as the ``method`` parameter.\n\n The callable is called as ``method(fun, args, **kwargs, **options)``\n where ``kwargs`` corresponds to any other parameters passed to `minimize`\n (such as `bracket`, `tol`, etc.), except the `options` dict, which has\n its contents also passed as `method` parameters pair by pair. The method\n shall return an `OptimizeResult` object.\n\n The provided `method` callable must be able to accept (and possibly ignore)\n arbitrary parameters; the set of parameters accepted by `minimize` may\n expand in future versions and then these parameters will be passed to\n the method. You can find an example in the scipy.optimize tutorial.\n\n .. versionadded:: 0.11.0\n\n Examples\n --------\n Consider the problem of minimizing the following function.\n\n >>> def f(x):\n ... 
return (x - 2) * x * (x + 2)**2\n\n Using the *Brent* method, we find the local minimum as:\n\n >>> from scipy.optimize import minimize_scalar\n >>> res = minimize_scalar(f)\n >>> res.x\n 1.28077640403\n\n Using the *Bounded* method, we find a local minimum with specified\n bounds as:\n\n >>> res = minimize_scalar(f, bounds=(-3, -1), method='bounded')\n >>> res.x\n -2.0000002026\n\n \"\"\"\n if not isinstance(args, tuple):\n args = (args,)\n\n if callable(method):\n meth = \"_custom\"\n else:\n meth = method.lower()\n if options is None:\n options = {}\n\n if tol is not None:\n options = dict(options)\n if meth == 'bounded' and 'xatol' not in options:\n warn(\"Method 'bounded' does not support relative tolerance in x; \"\n \"defaulting to absolute tolerance.\", RuntimeWarning)\n options['xatol'] = tol\n elif meth == '_custom':\n options.setdefault('tol', tol)\n else:\n options.setdefault('xtol', tol)\n\n # replace boolean \"disp\" option, if specified, by an integer value.\n disp = options.get('disp')\n if isinstance(disp, bool):\n options['disp'] = 2 * int(disp)\n\n if meth == '_custom':\n return method(fun, args=args, bracket=bracket, bounds=bounds, **options)\n elif meth == 'brent':\n return _minimize_scalar_brent(fun, bracket, args, **options)\n elif meth == 'bounded':\n if bounds is None:\n raise ValueError('The `bounds` parameter is mandatory for '\n 'method `bounded`.')\n return _minimize_scalar_bounded(fun, bounds, args, **options)\n elif meth == 'golden':\n return _minimize_scalar_golden(fun, bracket, args, **options)\n else:\n raise ValueError('Unknown solver %s' % method)\n\n\ndef _remove_from_bounds(bounds, i_fixed):\n \"\"\"Removes fixed variables from a `Bounds` instance\"\"\"\n lb = bounds.lb[~i_fixed]\n ub = bounds.ub[~i_fixed]\n return Bounds(lb, ub) # don't mutate original Bounds object\n\n\ndef _remove_from_func(fun_in, i_fixed, x_fixed, min_dim=None, remove=0):\n \"\"\"Wraps a function such that fixed variables need not be passed in\"\"\"\n def fun_out(x_in, *args, **kwargs):\n x_out = np.zeros_like(i_fixed, dtype=x_in.dtype)\n x_out[i_fixed] = x_fixed\n x_out[~i_fixed] = x_in\n y_out = fun_in(x_out, *args, **kwargs)\n y_out = np.array(y_out)\n\n if min_dim == 1:\n y_out = np.atleast_1d(y_out)\n elif min_dim == 2:\n y_out = np.atleast_2d(y_out)\n\n if remove == 1:\n y_out = y_out[..., ~i_fixed]\n elif remove == 2:\n y_out = y_out[~i_fixed, ~i_fixed]\n\n return y_out\n return fun_out\n\n\ndef _add_to_array(x_in, i_fixed, x_fixed):\n \"\"\"Adds fixed variables back to an array\"\"\"\n i_free = ~i_fixed\n if x_in.ndim == 2:\n i_free = i_free[:, None] @ i_free[None, :]\n x_out = np.zeros_like(i_free, dtype=x_in.dtype)\n x_out[~i_free] = x_fixed\n x_out[i_free] = x_in.ravel()\n return x_out\n\n\ndef standardize_bounds(bounds, x0, meth):\n \"\"\"Converts bounds to the form required by the solver.\"\"\"\n if meth in {'trust-constr', 'powell', 'nelder-mead', 'new'}:\n if not isinstance(bounds, Bounds):\n lb, ub = old_bound_to_new(bounds)\n bounds = Bounds(lb, ub)\n elif meth in ('l-bfgs-b', 'tnc', 'slsqp', 'old'):\n if isinstance(bounds, Bounds):\n bounds = new_bounds_to_old(bounds.lb, bounds.ub, x0.shape[0])\n return bounds\n\n\ndef standardize_constraints(constraints, x0, meth):\n \"\"\"Converts constraints to the form required by the solver.\"\"\"\n all_constraint_types = (NonlinearConstraint, LinearConstraint, dict)\n new_constraint_types = all_constraint_types[:-1]\n if constraints is None:\n constraints = []\n elif isinstance(constraints, all_constraint_types):\n 
constraints = [constraints]\n else:\n constraints = list(constraints) # ensure it's a mutable sequence\n\n if meth == 'trust-constr':\n for i, con in enumerate(constraints):\n if not isinstance(con, new_constraint_types):\n constraints[i] = old_constraint_to_new(i, con)\n else:\n # iterate over copy, changing original\n for i, con in enumerate(list(constraints)):\n if isinstance(con, new_constraint_types):\n old_constraints = new_constraint_to_old(con, x0)\n constraints[i] = old_constraints[0]\n constraints.extend(old_constraints[1:]) # appends 1 if present\n\n return constraints\n" ]
[ [ "numpy.zeros_like", "numpy.array", "numpy.asarray", "numpy.any", "numpy.atleast_1d", "numpy.atleast_2d" ] ]
qboticslabs/dragonfly
[ "f5166a2c6afa6a95ab05353b62a58f09d33874d4" ]
[ "dragonfly/distributions/unittest_continuous.py" ]
[ "\"\"\"\n Unit tests for continuous distributions.\n -- [email protected]\n\"\"\"\nfrom __future__ import absolute_import\n\n# pylint: disable=no-self-use\n\nimport numpy as np\nimport warnings\n\n# Local imports\nfrom .continuous import Beta, ContinuousUniform, Exponential, \\\n Normal, MultivariateGaussian\nfrom ..utils.base_test_class import BaseTestClass, execute_tests\n\nclass ContinuousDistributionsTestCase(BaseTestClass):\n \"\"\" Unit tests for distributions in continuous.py \"\"\"\n\n def setUp(self):\n \"\"\" Sets up unit tests. \"\"\"\n self.size = 1000000\n self.threshold = 0.01\n warnings.filterwarnings(\"ignore\", message=\"numpy.dtype size changed\")\n\n def _check_sample_sizes(self, samples):\n \"\"\" Compares the sample sizes with the size parameter\"\"\"\n assert self.size == len(samples)\n\n def _compute_mean(self, samples, axis=None):\n \"\"\" Computes Mean \"\"\"\n return np.mean(samples, axis)\n\n def _compute_variance(self, samples, axis=None):\n \"\"\" Computes Variance \"\"\"\n return np.var(samples, axis)\n\n def _compute_covariance(self, samples):\n \"\"\" Computes Covariance \"\"\"\n return np.cov(samples.T)\n\n def test_rand_sampling_normal(self):\n \"\"\" Tests random sampling from Normal distribution \"\"\"\n self.report('Test random sampling of Normal Distribution.')\n mean = 0\n variance = 1\n dist = Normal(mean, variance)\n samples = dist.draw_samples('random', self.size)\n mean_r = self._compute_mean(samples)\n var_r = self._compute_variance(samples)\n self._check_sample_sizes(samples)\n assert abs(mean - mean_r) <= self.threshold\n assert abs(variance - var_r) <= self.threshold\n self.report('%s :: test result: mean=%0.3f, variance=%0.3f'%\\\n (str(dist), mean_r, var_r), 'test_result')\n\n def test_rand_sampling_multi_normal(self):\n \"\"\" Tests random sampling from Multivariate Normal distribution \"\"\"\n self.report('Test random sampling of Multivariate Normal Distribution.')\n cov_thresh = 0.1\n mean_thresh = 0.01\n mean = np.arange(3)\n covariance = 3*np.identity(3)\n dist = MultivariateGaussian(mean, covariance)\n samples = dist.draw_samples('random', self.size)\n mean_r = self._compute_mean(samples, 0)\n self._check_sample_sizes(samples)\n assert (abs(mean - self._compute_mean(samples, 0)) <= mean_thresh).all()\n assert (abs(covariance - self._compute_covariance(samples)) <= cov_thresh).all()\n self.report('%s :: test result: mean=%s'%(str(dist), str(mean_r)), 'test_result')\n\n def test_rand_sampling_expo(self):\n \"\"\" Tests random sampling from Exponential distribution \"\"\"\n self.report('Test random sampling of Exponential Distribution.')\n lam = 2\n dist = Exponential(lam)\n mean = dist.get_mean()\n var = dist.get_variance()\n samples = dist.draw_samples('random', self.size)\n mean_r = self._compute_mean(samples)\n var_r = self._compute_variance(samples)\n self._check_sample_sizes(samples)\n assert abs(mean - mean_r) <= self.threshold\n assert abs(var - var_r) <= self.threshold\n self.report('%s, mean=%0.3f, variance=%0.3f :: test result: mean=%0.3f, '\n 'variance=%0.3f'%(str(dist), mean, var, mean_r, var_r), 'test_result')\n\n def test_rand_sampling_uniform(self):\n \"\"\" Tests random sampling from Continuous Uniform \"\"\"\n self.report('Test random sampling of Continuous Uniform Distribution.')\n lower = -5\n upper = 5\n dist = ContinuousUniform(lower, upper)\n mean = dist.get_mean()\n var = dist.get_variance()\n samples = dist.draw_samples('random', self.size)\n mean_r = self._compute_mean(samples)\n var_r = 
self._compute_variance(samples)\n self._check_sample_sizes(samples)\n assert abs(mean - mean_r) <= self.threshold\n assert abs(var - var_r) <= self.threshold\n self.report('%s, mean=%0.3f, variance=%0.3f :: test result: mean=%0.3f, '\n 'variance=%0.3f'%(str(dist), mean, var, mean_r, var_r), 'test_result')\n\n def test_rand_sampling_beta(self):\n \"\"\" Tests random sampling from Beta Distribution \"\"\"\n self.report('Test random sampling of Beta Distribution.')\n alpha = 1\n beta = 2\n dist = Beta(alpha, beta)\n mean = dist.get_mean()\n var = dist.get_variance()\n samples = dist.draw_samples('random', self.size)\n mean_r = self._compute_mean(samples)\n var_r = self._compute_variance(samples)\n self._check_sample_sizes(samples)\n assert abs(mean - mean_r) <= self.threshold\n assert abs(var - var_r) <= self.threshold\n self.report('%s, mean=%0.3f, variance=%0.3f :: test result: mean=%0.3f, '\n 'variance=%0.3f'%(str(dist), mean, var, mean_r, var_r), 'test_result')\n\nif __name__ == '__main__':\n execute_tests()\n" ]
[ [ "numpy.cov", "numpy.mean", "numpy.identity", "numpy.arange", "numpy.var" ] ]
rolando-contribute/dask
[ "a7e7ef16475e0486f3d3e8e80016f9a23f62c710" ]
[ "dask/utils.py" ]
[ "from __future__ import absolute_import, division, print_function\n\nfrom collections import Iterator\nfrom contextlib import contextmanager\nfrom errno import ENOENT\nimport functools\nimport io\nimport os\nimport sys\nimport shutil\nimport struct\nimport tempfile\nimport inspect\nimport codecs\nimport math\nfrom sys import getdefaultencoding\n\nfrom .compatibility import long, getargspec, BZ2File, GzipFile, LZMAFile\n\n\nsystem_encoding = getdefaultencoding()\nif system_encoding == 'ascii':\n system_encoding = 'utf-8'\n\n\ndef raises(err, lamda):\n try:\n lamda()\n return False\n except err:\n return True\n\n\ndef deepmap(func, *seqs):\n \"\"\" Apply function inside nested lists\n\n >>> inc = lambda x: x + 1\n >>> deepmap(inc, [[1, 2], [3, 4]])\n [[2, 3], [4, 5]]\n\n >>> add = lambda x, y: x + y\n >>> deepmap(add, [[1, 2], [3, 4]], [[10, 20], [30, 40]])\n [[11, 22], [33, 44]]\n \"\"\"\n if isinstance(seqs[0], (list, Iterator)):\n return [deepmap(func, *items) for items in zip(*seqs)]\n else:\n return func(*seqs)\n\n\n@contextmanager\ndef ignoring(*exceptions):\n try:\n yield\n except exceptions:\n pass\n\n\n@contextmanager\ndef tmpfile(extension='', dir=None):\n extension = '.' + extension.lstrip('.')\n handle, filename = tempfile.mkstemp(extension, dir=dir)\n os.close(handle)\n os.remove(filename)\n\n try:\n yield filename\n finally:\n if os.path.exists(filename):\n if os.path.isdir(filename):\n shutil.rmtree(filename)\n else:\n with ignoring(OSError):\n os.remove(filename)\n\n\n@contextmanager\ndef tmpdir(dir=None):\n dirname = tempfile.mkdtemp(dir=dir)\n\n try:\n yield dirname\n finally:\n if os.path.exists(dirname):\n if os.path.isdir(dirname):\n shutil.rmtree(dirname)\n else:\n with ignoring(OSError):\n os.remove(dirname)\n\n\n@contextmanager\ndef filetext(text, extension='', open=open, mode='w'):\n with tmpfile(extension=extension) as filename:\n f = open(filename, mode=mode)\n try:\n f.write(text)\n finally:\n try:\n f.close()\n except AttributeError:\n pass\n\n yield filename\n\n\ndef repr_long_list(seq):\n \"\"\"\n\n >>> repr_long_list(list(range(100)))\n '[0, 1, 2, ..., 98, 99]'\n \"\"\"\n if len(seq) < 8:\n return repr(seq)\n else:\n return repr(seq[:3])[:-1] + ', ..., ' + repr(seq[-2:])[1:]\n\n\nclass IndexCallable(object):\n \"\"\" Provide getitem syntax for functions\n\n >>> def inc(x):\n ... 
return x + 1\n\n >>> I = IndexCallable(inc)\n >>> I[3]\n 4\n \"\"\"\n __slots__ = 'fn',\n def __init__(self, fn):\n self.fn = fn\n\n def __getitem__(self, key):\n return self.fn(key)\n\n\n@contextmanager\ndef filetexts(d, open=open, mode='t'):\n \"\"\" Dumps a number of textfiles to disk\n\n d - dict\n a mapping from filename to text like {'a.csv': '1,1\\n2,2'}\n \"\"\"\n for filename, text in d.items():\n f = open(filename, 'w' + mode)\n try:\n f.write(text)\n finally:\n try:\n f.close()\n except AttributeError:\n pass\n\n yield list(d)\n\n for filename in d:\n if os.path.exists(filename):\n os.remove(filename)\n\n\ncompressions = {'gz': 'gzip', 'bz2': 'bz2', 'xz': 'xz'}\n\n\ndef infer_compression(filename):\n extension = os.path.splitext(filename)[-1].strip('.')\n return compressions.get(extension, None)\n\n\nopens = {'gzip': GzipFile, 'bz2': BZ2File, 'xz': LZMAFile}\n\n\ndef open(filename, mode='rb', compression=None, **kwargs):\n if compression == 'infer':\n compression = infer_compression(filename)\n return opens.get(compression, io.open)(filename, mode, **kwargs)\n\n\ndef get_bom(fn, compression=None):\n \"\"\"\n Get the Byte Order Mark (BOM) if it exists.\n \"\"\"\n boms = set((codecs.BOM_UTF16, codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE))\n with open(fn, mode='rb', compression=compression) as f:\n f.seek(0)\n bom = f.read(2)\n f.seek(0)\n if bom in boms:\n return bom\n else:\n return b''\n\n\ndef get_bin_linesep(encoding, linesep):\n \"\"\"\n Simply doing `linesep.encode(encoding)` does not always give you\n *just* the linesep bytes, for some encodings this prefix's the\n linesep bytes with the BOM. This function ensures we just get the\n linesep bytes.\n \"\"\"\n if encoding == 'utf-16':\n return linesep.encode('utf-16')[2:] # [2:] strips bom\n else:\n return linesep.encode(encoding)\n\n\ndef textblock(filename, start, end, compression=None, encoding=system_encoding,\n linesep=os.linesep, buffersize=4096):\n \"\"\"Pull out a block of text from a file given start and stop bytes.\n\n This gets data starting/ending from the next linesep delimiter. Each block\n consists of bytes in the range [start,end[, i.e. the stop byte is excluded.\n If `start` is 0, then `start` corresponds to the true start byte. If\n `start` is greater than 0 and does not point to the beginning of a new\n line, then `start` is incremented until it corresponds to the start byte of\n the next line. If `end` does not point to the beginning of a new line, then\n the line that begins before `end` is included in the block although its\n last byte exceeds `end`.\n\n Examples\n --------\n >> with open('myfile.txt', 'wb') as f:\n .. 
f.write('123\\n456\\n789\\nabc')\n\n In the example below, 1 and 10 don't line up with endlines.\n\n >> u''.join(textblock('myfile.txt', 1, 10))\n '456\\n789\\n'\n \"\"\"\n # Make sure `linesep` is not a byte string because\n # `io.TextIOWrapper` in Python versions other than 2.7 dislike byte\n # strings for the `newline` argument.\n linesep = str(linesep)\n\n # Get byte representation of the line separator.\n bin_linesep = get_bin_linesep(encoding, linesep)\n bin_linesep_len = len(bin_linesep)\n\n if buffersize < bin_linesep_len:\n error = ('`buffersize` ({0:d}) must be at least as large as the '\n 'number of line separator bytes ({1:d}).')\n raise ValueError(error.format(buffersize, bin_linesep_len))\n\n chunksize = end - start\n\n with open(filename, 'rb', compression) as f:\n with io.BufferedReader(f) as fb:\n # If `start` does not correspond to the beginning of the file, we\n # need to move the file pointer to `start - len(bin_linesep)`,\n # search for the position of the next a line separator, and set\n # `start` to the position after that line separator.\n if start > 0:\n # `start` is decremented by `len(bin_linesep)` to detect the\n # case where the original `start` value corresponds to the\n # beginning of a line.\n start = max(0, start - bin_linesep_len)\n # Set the file pointer to `start`.\n fb.seek(start)\n # Number of bytes to shift the file pointer before reading a\n # new chunk to make sure that a multi-byte line separator, that\n # is split by the chunk reader, is still detected.\n shift = 1 - bin_linesep_len\n while True:\n buf = f.read(buffersize)\n if len(buf) < bin_linesep_len:\n raise StopIteration\n try:\n # Find the position of the next line separator and add\n # `len(bin_linesep)` which yields the position of the\n # first byte of the next line.\n start += buf.index(bin_linesep)\n start += bin_linesep_len\n except ValueError:\n # No line separator was found in the current chunk.\n # Before reading the next chunk, we move the file\n # pointer back `len(bin_linesep) - 1` bytes to make\n # sure that a multi-byte line separator, that may have\n # been split by the chunk reader, is still detected.\n start += len(buf)\n start += shift\n fb.seek(shift, os.SEEK_CUR)\n else:\n # We have found the next line separator, so we need to\n # set the file pointer to the first byte of the next\n # line.\n fb.seek(start)\n break\n\n with io.TextIOWrapper(fb, encoding, newline=linesep) as fbw:\n # Retrieve and yield lines until the file pointer reaches\n # `end`.\n while start < end:\n line = next(fbw)\n # We need to encode the line again to get the byte length\n # in order to correctly update `start`.\n bin_line_len = len(line.encode(encoding))\n if chunksize < bin_line_len:\n error = ('`chunksize` ({0:d}) is less than the line '\n 'length ({1:d}). This may cause duplicate '\n 'processing of this line. 
It is advised to '\n 'increase `chunksize`.')\n raise IOError(error.format(chunksize, bin_line_len))\n\n yield line\n start += bin_line_len\n\n\ndef concrete(seq):\n \"\"\" Make nested iterators concrete lists\n\n >>> data = [[1, 2], [3, 4]]\n >>> seq = iter(map(iter, data))\n >>> concrete(seq)\n [[1, 2], [3, 4]]\n \"\"\"\n if isinstance(seq, Iterator):\n seq = list(seq)\n if isinstance(seq, (tuple, list)):\n seq = list(map(concrete, seq))\n return seq\n\n\ndef skip(func):\n pass\n\n\ndef pseudorandom(n, p, random_state=None):\n \"\"\" Pseudorandom array of integer indexes\n\n >>> pseudorandom(5, [0.5, 0.5], random_state=123)\n array([1, 0, 0, 1, 1], dtype=int8)\n\n >>> pseudorandom(10, [0.5, 0.2, 0.2, 0.1], random_state=5)\n array([0, 2, 0, 3, 0, 1, 2, 1, 0, 0], dtype=int8)\n \"\"\"\n import numpy as np\n p = list(p)\n cp = np.cumsum([0] + p)\n assert np.allclose(1, cp[-1])\n assert len(p) < 256\n\n if not isinstance(random_state, np.random.RandomState):\n random_state = np.random.RandomState(random_state)\n\n x = random_state.random_sample(n)\n out = np.empty(n, dtype='i1')\n\n for i, (low, high) in enumerate(zip(cp[:-1], cp[1:])):\n out[(x >= low) & (x < high)] = i\n return out\n\n\ndef different_seeds(n, random_state=None):\n \"\"\" A list of different 32 bit integer seeds\n\n Parameters\n ----------\n n: int\n Number of distinct seeds to return\n random_state: int or np.random.RandomState\n If int create a new RandomState with this as the seed\n Otherwise draw from the passed RandomState\n \"\"\"\n import numpy as np\n\n if not isinstance(random_state, np.random.RandomState):\n random_state = np.random.RandomState(random_state)\n\n big_n = np.iinfo(np.int32).max\n\n seeds = set(random_state.randint(big_n, size=n))\n while len(seeds) < n:\n seeds.add(random_state.randint(big_n))\n\n # Sorting makes it easier to know what seeds are for what chunk\n return sorted(seeds)\n\n\ndef is_integer(i):\n \"\"\"\n >>> is_integer(6)\n True\n >>> is_integer(42.0)\n True\n >>> is_integer('abc')\n False\n \"\"\"\n import numpy as np\n if isinstance(i, (int, long)):\n return True\n if isinstance(i, float):\n return (i).is_integer()\n if issubclass(type(i), np.integer):\n return i\n else:\n return False\n\n\ndef file_size(fn, compression=None):\n \"\"\" Size of a file on disk\n\n If compressed then return the uncompressed file size\n \"\"\"\n if compression == 'gzip':\n with open(fn, 'rb') as f:\n f.seek(-4, 2)\n result = struct.unpack('I', f.read(4))[0]\n elif compression:\n # depending on the implementation, this may be inefficient\n with open(fn, 'rb', compression) as f:\n result = f.seek(0, 2)\n else:\n result = os.stat(fn).st_size\n return result\n\n\nONE_ARITY_BUILTINS = set([abs, all, any, bool, bytearray, bytes, callable, chr,\n classmethod, complex, dict, dir, enumerate, eval, float, format, frozenset,\n hash, hex, id, int, iter, len, list, max, min, next, oct, open, ord, range,\n repr, reversed, round, set, slice, sorted, staticmethod, str, sum, tuple,\n type, vars, zip])\nif sys.version_info[0] == 3: # Python 3\n ONE_ARITY_BUILTINS |= set([ascii])\nif sys.version_info[:2] != (2, 6):\n ONE_ARITY_BUILTINS |= set([memoryview])\nMULTI_ARITY_BUILTINS = set([compile, delattr, divmod, filter, getattr, hasattr,\n isinstance, issubclass, map, pow, setattr])\n\ndef takes_multiple_arguments(func):\n \"\"\" Does this function take multiple arguments?\n\n >>> def f(x, y): pass\n >>> takes_multiple_arguments(f)\n True\n\n >>> def f(x): pass\n >>> takes_multiple_arguments(f)\n False\n\n >>> def f(x, 
y=None): pass\n >>> takes_multiple_arguments(f)\n False\n\n >>> def f(*args): pass\n >>> takes_multiple_arguments(f)\n True\n\n >>> class Thing(object):\n ... def __init__(self, a): pass\n >>> takes_multiple_arguments(Thing)\n False\n\n \"\"\"\n if func in ONE_ARITY_BUILTINS:\n return False\n elif func in MULTI_ARITY_BUILTINS:\n return True\n\n try:\n spec = getargspec(func)\n except:\n return False\n\n try:\n is_constructor = spec.args[0] == 'self' and isinstance(func, type)\n except:\n is_constructor = False\n\n if spec.varargs:\n return True\n\n if spec.defaults is None:\n return len(spec.args) - is_constructor != 1\n return len(spec.args) - len(spec.defaults) - is_constructor > 1\n\n\nclass Dispatch(object):\n \"\"\"Simple single dispatch.\"\"\"\n def __init__(self):\n self._lookup = {}\n\n def register(self, type, func):\n \"\"\"Register dispatch of `func` on arguments of type `type`\"\"\"\n if isinstance(type, tuple):\n for t in type:\n self.register(t, func)\n else:\n self._lookup[type] = func\n\n def __call__(self, arg):\n # We dispatch first on type(arg), and fall back to iterating through\n # the mro. This is significantly faster in the common case where\n # type(arg) is in the lookup, with only a small penalty on fall back.\n lk = self._lookup\n typ = type(arg)\n if typ in lk:\n return lk[typ](arg)\n for cls in inspect.getmro(typ)[1:]:\n if cls in lk:\n return lk[cls](arg)\n raise TypeError(\"No dispatch for {0} type\".format(typ))\n\n\ndef ensure_not_exists(filename):\n \"\"\"\n Ensure that a file does not exist.\n \"\"\"\n try:\n os.unlink(filename)\n except OSError as e:\n if e.errno != ENOENT:\n raise\n\n\ndef _skip_doctest(line):\n if '>>>' in line:\n return line + ' # doctest: +SKIP'\n else:\n return line\n\n\ndef derived_from(original_klass, version=None, ua_args=[]):\n \"\"\"Decorator to attach original class's docstring to the wrapped method.\n\n Parameters\n ----------\n original_klass: type\n Original class which the method is derived from\n version : str\n Original package version which supports the wrapped method\n ua_args : list\n List of keywords which Dask doesn't support. 
Keywords existing in\n original but not in Dask will automatically be added.\n \"\"\"\n def wrapper(method):\n method_name = method.__name__\n\n try:\n # do not use wraps here, as it hides keyword arguments displayed\n # in the doc\n original_method = getattr(original_klass, method_name)\n doc = original_method.__doc__\n if doc is None:\n doc = ''\n\n method_args = getargspec(method).args\n original_args = getargspec(original_method).args\n\n not_supported = [m for m in original_args if m not in method_args]\n if len(ua_args) > 0:\n not_supported.extend(ua_args)\n\n if len(not_supported) > 0:\n note = (\"\\n Notes\\n -----\\n\"\n \" Dask doesn't supports following argument(s).\\n\\n\")\n args = ''.join([' * {0}\\n'.format(a) for a in not_supported])\n doc = doc + note + args\n doc = '\\n'.join([_skip_doctest(line) for line in doc.split('\\n')])\n method.__doc__ = doc\n return method\n\n except AttributeError:\n module_name = original_klass.__module__.split('.')[0]\n @functools.wraps(method)\n def wrapped(*args, **kwargs):\n msg = \"Base package doesn't support '{0}'.\".format(method_name)\n if version is not None:\n msg2 = \" Use {0} {1} or later to use this method.\"\n msg += msg2.format(module_name, version)\n raise NotImplementedError(msg)\n return wrapped\n return wrapper\n\n\ndef funcname(func, full=False):\n \"\"\"Get the name of a function.\"\"\"\n while hasattr(func, 'func'):\n func = func.func\n try:\n if full:\n return func.__qualname__.strip('<>')\n else:\n return func.__name__.strip('<>')\n except:\n return str(func).strip('<>')\n\n\ndef ensure_bytes(s):\n \"\"\" Turn string or bytes to bytes\n\n >>> ensure_bytes(u'123')\n '123'\n >>> ensure_bytes('123')\n '123'\n >>> ensure_bytes(b'123')\n '123'\n \"\"\"\n if isinstance(s, bytes):\n return s\n if hasattr(s, 'encode'):\n return s.encode()\n raise TypeError(\n \"Object %s is neither a bytes object nor has an encode method\" % s)\n\n\ndef digit(n, k, base):\n \"\"\"\n\n >>> digit(1234, 0, 10)\n 4\n >>> digit(1234, 1, 10)\n 3\n >>> digit(1234, 2, 10)\n 2\n >>> digit(1234, 3, 10)\n 1\n \"\"\"\n return n // base**k % base\n\n\ndef insert(tup, loc, val):\n \"\"\"\n\n >>> insert(('a', 'b', 'c'), 0, 'x')\n ('x', 'b', 'c')\n \"\"\"\n L = list(tup)\n L[loc] = val\n return tuple(L)\n\ndef build_name_function(max_int):\n \"\"\" Returns a function that receives a single integer\n and returns it as a string padded by enough zero characters\n to align with maximum possible integer\n\n >>> name_f = build_name_function(57)\n\n >>> name_f(7)\n '07'\n >>> name_f(31)\n '31'\n >>> build_name_function(1000)(42)\n '0042'\n >>> build_name_function(999)(42)\n '042'\n >>> build_name_function(0)(0)\n '0'\n \"\"\"\n # handle corner cases max_int is 0 or exact power of 10\n max_int += 1e-8\n\n pad_length = int(math.ceil(math.log10(max_int)))\n\n def name_function(i):\n return str(i).zfill(pad_length)\n\n return name_function\n" ]
[ [ "numpy.empty", "numpy.random.RandomState", "numpy.allclose", "numpy.cumsum", "numpy.iinfo" ] ]
fastscape-lem/ipyfastscape
[ "d430ae66d510d4cb16005299603681686b33c55d" ]
[ "src/ipyfastscape/tests/conftest.py" ]
[ "import numpy as np\nimport pytest\nimport xarray as xr\n\nfrom ipyfastscape.xr_accessor import WidgetsAccessor # noqa: F401\n\n\[email protected]\ndef dataset() -> xr.Dataset:\n x = np.array([0, 1, 2])\n y = np.array([0, 1, 2])\n time = np.array([0, 100, 200])\n batch = np.array([1, 2, 3])\n\n elevation = (\n batch[:, None, None, None]\n * time[None, :, None, None]\n * y[None, None, :, None]\n * x[None, None, None, :]\n )\n other_var = np.ones_like(elevation)\n xy_var = x[None, :] * y[:, None]\n\n ds = xr.Dataset(\n data_vars={\n 'topography__elevation': (('batch', 'time', 'y', 'x'), elevation),\n 'other_var': (('batch', 'time', 'y', 'x'), other_var),\n 'xy_var': (('y', 'x'), xy_var),\n },\n coords={'batch': batch, 'time': time, 'y': y, 'x': x},\n )\n\n return ds\n\n\[email protected]\ndef dataset_init(dataset) -> xr.Dataset:\n dataset._widgets(time_dim='time')\n\n return dataset\n" ]
[ [ "numpy.array", "numpy.ones_like" ] ]
goodhamgupta/EpidemiOptim
[ "a4fe3fcfc2d82a10db16a168526982c03ca2c8d3" ]
[ "epidemioptim/environments/gym_envs/epidemic_discrete.py" ]
[ "import numpy as np\nimport gym\nfrom epidemioptim.environments.gym_envs.base_env import BaseEnv\n\n\nclass EpidemicDiscrete(BaseEnv):\n def __init__(\n self,\n cost_function,\n model,\n simulation_horizon,\n ratio_death_to_R=0.005, # death ratio among people who were infected\n time_resolution=7,\n seed=np.random.randint(1e6),\n ):\n \"\"\"\n EpidemicDiscrete environment is based on the Epidemiological SEIRAH model from Prague et al., 2020 and on a bi-objective\n cost function (death toll and gdp recess).\n\n Parameters\n ----------\n cost_function: BaseCostFunction\n A cost function.\n model: BaseModel\n An epidemiological model.\n simulation_horizon: int\n Simulation horizon in days.\n ratio_death_to_R: float\n Ratio of deaths among recovered individuals.\n time_resolution: int\n In days.\n \"\"\"\n\n # Initialize model\n self.model = model\n self.stochastic = self.model.stochastic\n self.simulation_horizon = simulation_horizon\n self.reset_same = (\n False # whether the next reset resets the same epidemiological model\n )\n\n # Initialize cost function\n self.cost_function = cost_function\n self.nb_costs = cost_function.nb_costs\n self.cumulative_costs = [0 for _ in range(self.nb_costs)]\n\n # Initialize states\n self.state_labels = (\n self.model.internal_states_labels\n + [\"previous_lockdown_state\", \"current_lockdown_state\"]\n + [\n \"cumulative_cost_{}\".format(id_cost)\n for id_cost in range(self.cost_function.nb_costs)\n ]\n + [\"level_b\"]\n )\n self.label_to_id = dict(\n zip(self.state_labels, np.arange(len(self.state_labels)))\n )\n self.normalization_factors = [self.model.current_internal_params[\"N_av\"]] * len(\n self.model.internal_states_labels\n ) + [1, 1, self.model.current_internal_params[\"N_av\"], 150, 1]\n\n super().__init__(\n cost_function=cost_function,\n model=model,\n simulation_horizon=simulation_horizon,\n dim_action=2,\n discrete=True,\n seed=seed,\n )\n\n self.ratio_death_to_R = ratio_death_to_R\n self.time_resolution = time_resolution\n self._max_episode_steps = simulation_horizon // time_resolution\n self.history = None\n\n # Action modalities\n self.level_b_splits = (\n 7,\n 14,\n 21,\n ) # switches between transmission rates, in days (4 stages)\n self.level_b = 0 # index of the stage\n self.b0 = self.model.current_internal_params[\n \"b_fit\"\n ] # initial transmission rate\n self.betas = [self.b0] + [\n np.exp(self.model.current_internal_params[\"beta{}\".format(i + 1)])\n for i in range(4)\n ] # factors of reduction for each stage\n self.bs = None\n\n def _compute_b(self, times_since_start, times_since_last):\n \"\"\"\n Computes the transmission rate depending on the number of days since the last lock-down or since beginning of the current lock-down.\n\n Parameters\n ----------\n times_since_start: nd.array of ints\n Time since the start of the current lock-down, for each day.\n times_since_last: nd.array of ints\n Time since the last lock-down, for each day.\n\n Returns\n -------\n list\n The values of transmission rates for each day.\n \"\"\"\n if self.lockdown_state == 0:\n # if new lock-down decrease the transmission rate of one stage\n if self.previous_lockdown_state != self.lockdown_state:\n self.level_b = max(self.level_b - 1, 0)\n\n # further decrease the transmission rate every 7 days until first stage.\n assert times_since_start.size == 0\n bs = []\n for t_i in times_since_last:\n if t_i in self.level_b_splits:\n self.level_b = max(self.level_b - 1, 0)\n bs.append(np.product(self.betas[: self.level_b + 1]))\n else:\n # if lock-down 
terminated, increase the transmission rate of one stage.\n if self.previous_lockdown_state != self.lockdown_state:\n self.level_b = min(self.level_b + 1, len(self.betas) - 1)\n\n # further increase the transmission rate every 7 days until last stage.\n assert times_since_last.size == 0\n bs = []\n for t_i in times_since_start:\n if t_i in self.level_b_splits:\n self.level_b = min(self.level_b + 1, len(self.betas) - 1)\n bs.append(np.product(self.betas[: self.level_b + 1]))\n return bs\n\n def _update_previous_env_state(self):\n \"\"\"\n Save previous env state.\n\n \"\"\"\n if self.env_state is not None:\n self.previous_env_state = self.env_state.copy()\n self.previous_env_state_labelled = self.env_state_labelled.copy()\n\n def _update_env_state(self):\n \"\"\"\n Update the environment state.\n\n \"\"\"\n\n # Update env state\n self.env_state_labelled = dict(\n zip(self.model.internal_states_labels, self.model_state)\n )\n self.env_state_labelled.update(\n previous_lockdown_state=self.previous_lockdown_state,\n current_lockdown_state=self.lockdown_state,\n level_b=self.level_b,\n )\n # track cumulative costs in the state.\n for id_cost in range(self.nb_costs):\n self.env_state_labelled[\n \"cumulative_cost_{}\".format(id_cost)\n ] = self.cumulative_costs[id_cost]\n assert sorted(list(self.env_state_labelled.keys())) == sorted(\n self.state_labels\n ), \"labels do not match\"\n self.env_state = np.array(\n [self.env_state_labelled[k] for k in self.state_labels]\n )\n\n # Set previous env state to env state if first step\n if self.previous_env_state is None:\n # happens at first step\n self.previous_env_state = self.env_state.copy()\n self.previous_env_state_labelled = self.env_state_labelled.copy()\n\n def reset_same_model(self):\n \"\"\"\n To call if you want to reset to the same model the next time you call reset.\n Will be cancelled after the first reset, it needs to be called again each time.\n\n\n \"\"\"\n self.reset_same = True\n\n def reset(self):\n \"\"\"\n Reset the environment and the tracking of data.\n\n Returns\n -------\n nd.array\n The initial environment state.\n\n \"\"\"\n # initialize history of states, internal model states, actions, cost_functions, deaths\n self.history = dict(\n env_states=[],\n model_states=[],\n env_timesteps=[],\n actions=[],\n aggregated_costs=[],\n costs=[],\n lockdown=[],\n deaths=[],\n b=[],\n )\n # initialize time and lockdown days counter\n self.t = 0\n self.count_lockdown = 0\n self.count_deaths = 0\n self.count_since_start_lockdown = 0\n self.count_since_last_lockdown = 0\n self.level_b = 0\n self.b = self.model.current_internal_params[\"b_fit\"]\n\n self.lockdown_state = 0 # 0 not lockdown, 1 lockdown\n self.previous_lockdown_state = self.lockdown_state\n self.cumulative_costs = [0 for _ in range(self.nb_costs)]\n\n # initialize model internal state and params\n if self.reset_same:\n self.model.reset_same_model()\n self.reset_same = False\n else:\n self.model.reset()\n self.model_state = self.model._get_current_state()\n\n self._update_previous_env_state()\n self._update_env_state()\n\n self.history[\"env_states\"].append(self.env_state.copy())\n self.history[\"model_states\"].append(self.model_state.copy().tolist())\n self.history[\"env_timesteps\"].append(self.t)\n\n return self._normalize_env_state(self.env_state)\n\n def update_with_action(self, action):\n \"\"\"\n Implement effect of action on transmission rate.\n\n Parameters\n ----------\n action: int\n Action is 0 (no lock-down) or 1 (lock-down).\n\n \"\"\"\n\n # Translate actions\n 
self.previous_lockdown_state = self.lockdown_state\n previous_count_start = self.count_since_start_lockdown\n previous_count_last = self.count_since_last_lockdown\n\n if action == 0:\n # no lock-down\n self.jump_of = min(self.time_resolution, self.simulation_horizon - self.t)\n self.lockdown_state = 0\n if self.previous_lockdown_state == self.lockdown_state:\n self.count_since_last_lockdown += self.jump_of\n else:\n self.count_since_last_lockdown = self.jump_of\n self.count_since_start_lockdown = 0\n else:\n self.jump_of = min(self.time_resolution, self.simulation_horizon - self.t)\n self.lockdown_state = 1\n if self.lockdown_state == self.previous_lockdown_state:\n self.count_since_start_lockdown += self.jump_of\n else:\n self.count_since_start_lockdown = self.jump_of\n self.count_since_last_lockdown = 0\n\n # Modify model parameters based on lockdown state\n since_start = np.arange(previous_count_start, self.count_since_start_lockdown)\n since_last = np.arange(previous_count_last, self.count_since_last_lockdown)\n self.bs = self._compute_b(\n times_since_start=since_start, times_since_last=since_last\n )\n self.model.current_internal_params[\"b_fit\"] = self.b\n\n def step(self, action):\n \"\"\"\n Traditional step function from OpenAI Gym envs. Uses the action to update the environment.\n\n Parameters\n ----------\n action: int\n Action is 0 (no lock-down) or 1 (lock-down).\n\n\n Returns\n -------\n state: nd.array\n New environment state.\n cost_aggregated: float\n Aggregated measure of the cost.\n done: bool\n Whether the episode is terminated.\n info: dict\n Further infos. In our case, the costs, icu capacity of the region and whether constraints are violated.\n\n \"\"\"\n action = int(action)\n assert 0 <= action < self.dim_action\n\n self.update_with_action(action)\n if self.lockdown_state == 1:\n self.count_lockdown += self.jump_of\n\n # Run model for jump_of steps\n model_state = [self.model_state]\n model_states = []\n for b in self.bs:\n self.model.current_internal_params[\"b_fit\"] = b\n model_state = self.model.run_n_steps(model_state[-1], 1)\n model_states += model_state.tolist()\n self.model_state = model_state[-1] # last internal state is the new current one\n self.t += self.jump_of\n\n # Update state\n self._update_previous_env_state()\n self._update_env_state()\n\n # Store history\n costs = [\n c.compute_cost(\n previous_state=np.atleast_2d(self.previous_env_state),\n state=np.atleast_2d(self.env_state),\n label_to_id=self.label_to_id,\n action=action,\n others=dict(jump_of=self.time_resolution),\n )[0]\n for c in self.cost_function.costs\n ]\n for i in range(len(costs)):\n self.cumulative_costs[i] += costs[i]\n n_deaths = self.cost_function.compute_deaths(\n previous_state=np.atleast_2d(self.previous_env_state),\n state=np.atleast_2d(self.env_state),\n label_to_id=self.label_to_id,\n action=action,\n )[0]\n\n self._update_env_state()\n\n self.history[\"actions\"] += [action] * self.jump_of\n self.history[\"env_states\"] += [self.env_state.copy()] * self.jump_of\n self.history[\"env_timesteps\"] += list(range(self.t - self.jump_of, self.t))\n self.history[\"model_states\"] += model_states\n self.history[\"lockdown\"] += [self.lockdown_state] * self.jump_of\n self.history[\"deaths\"] += [n_deaths / self.jump_of] * self.jump_of\n self.history[\"b\"] += self.bs\n\n # Compute cost_function\n cost_aggregated, costs, over_constraints = self.cost_function.compute_cost(\n previous_state=self.previous_env_state,\n state=self.env_state,\n label_to_id=self.label_to_id,\n 
action=action,\n others=dict(jump_of=self.jump_of),\n )\n costs = costs.flatten()\n\n self.history[\"aggregated_costs\"] += [\n cost_aggregated / self.jump_of\n ] * self.jump_of\n self.history[\"costs\"] += [costs / self.jump_of for _ in range(self.jump_of)]\n self.costs = costs.copy()\n\n if self.t >= self.simulation_horizon:\n done = 1\n else:\n done = 0\n\n return (\n self._normalize_env_state(self.env_state),\n cost_aggregated,\n done,\n dict(\n costs=costs,\n constraints=over_constraints.flatten(),\n n_icu=self.env_state[self.label_to_id[\"H\"]] * 0.25,\n ),\n )\n\n # Utils\n def _normalize_env_state(self, env_state):\n return (env_state / np.array(self.normalization_factors)).copy()\n\n def _set_rew_params(self, goal):\n self.cost_function.set_goal_params(goal.copy())\n\n def sample_cost_function_params(self):\n return self.cost_function.sample_goal_params()\n\n # Format data for plotting\n def get_data(self):\n\n data = dict(\n history=self.history.copy(),\n time_jump=1,\n model_states_labels=self.model.internal_states_labels,\n icu_capacity=self.model.current_internal_params[\"icu\"],\n )\n t = self.history[\"env_timesteps\"]\n cumulative_death = [\n np.sum(self.history[\"deaths\"][:i]) for i in range(len(t) - 1)\n ]\n cumulative_eco_cost = [\n np.array(self.history[\"costs\"])[:i, 1].sum() for i in range(len(t) - 1)\n ]\n betas = [0, 0.25, 0.5, 0.75, 1]\n costs = np.array(self.history[\"costs\"])\n aggregated = [\n self.cost_function.compute_aggregated_cost(costs, beta) for beta in betas\n ]\n to_plot = [\n np.array(self.history[\"deaths\"]),\n np.array(cumulative_death),\n aggregated,\n costs[:, 1],\n np.array(cumulative_eco_cost),\n np.array(self.history[\"b\"]),\n ]\n labels = [\n \"New Deaths\",\n \"Total Deaths\",\n r\"Aggregated Cost\",\n \"New GDP Loss (B)\",\n \"Total GDP Loss (B)\",\n \"Transmission rate\",\n ]\n legends = [\n None,\n None,\n [r\"$\\beta = $\" + str(beta) for beta in betas],\n None,\n None,\n None,\n ]\n stats_run = dict(to_plot=to_plot, labels=labels, legends=legends)\n data[\"stats_run\"] = stats_run\n data[\n \"title\"\n ] = \"Eco cost: {:.2f} B, Death Cost: {}, Aggregated Cost: {:.2f}\".format(\n cumulative_eco_cost[-1],\n int(cumulative_death[-1]),\n np.sum(self.history[\"aggregated_costs\"]),\n )\n return data\n\n\nif __name__ == \"__main__\":\n from epidemioptim.utils import plot_stats\n from epidemioptim.environments.cost_functions import get_cost_function\n from epidemioptim.environments.models import get_model\n\n simulation_horizon = 364\n stochastic = False\n region = \"IDF\"\n\n model = get_model(\n model_id=\"prague_seirah\", params=dict(region=region, stochastic=stochastic)\n )\n\n N_region = model.pop_sizes[region]\n N_country = np.sum(list(model.pop_sizes.values()))\n ratio_death_to_R = 0.005\n\n cost_func = get_cost_function(\n cost_function_id=\"multi_cost_death_gdp_controllable\",\n params=dict(\n N_region=N_region, N_country=N_country, ratio_death_to_R=ratio_death_to_R\n ),\n )\n\n env = gym.make(\n \"EpidemicDiscrete-v0\",\n cost_function=cost_func,\n model=model,\n simulation_horizon=simulation_horizon,\n )\n env.reset()\n\n actions = np.random.choice([0, 1], size=53)\n actions = np.zeros([53])\n actions[3 : 3 + 8] = 1\n t = 0\n r = 0\n done = False\n while not done:\n out = env.step(actions[t])\n t += 1\n r += out[1]\n done = out[2]\n stats = env.unwrapped.get_data()\n\n # plot model states\n plot_stats(\n t=stats[\"history\"][\"env_timesteps\"],\n states=np.array(stats[\"history\"][\"model_states\"]).transpose(),\n 
labels=stats[\"model_states_labels\"],\n lockdown=np.array(stats[\"history\"][\"lockdown\"]),\n icu_capacity=stats[\"icu_capacity\"],\n time_jump=stats[\"time_jump\"],\n )\n plot_stats(\n t=stats[\"history\"][\"env_timesteps\"][1:],\n states=stats[\"stats_run\"][\"to_plot\"],\n labels=stats[\"stats_run\"][\"labels\"],\n legends=stats[\"stats_run\"][\"legends\"],\n title=stats[\"title\"],\n lockdown=np.array(stats[\"history\"][\"lockdown\"]),\n time_jump=stats[\"time_jump\"],\n show=True,\n )\n" ]
[ [ "numpy.product", "numpy.array", "numpy.random.choice", "numpy.zeros", "numpy.sum", "numpy.random.randint", "numpy.arange", "numpy.atleast_2d" ] ]
marco-cardoso/pandas-profiling
[ "e7ea34547087d5ecf792604efb57facee2ec609a" ]
[ "pandas_profiling/model/base.py" ]
[ "\"\"\"Common parts to all other modules, mainly utility functions.\"\"\"\nimport sys\n\nimport pandas as pd\nfrom enum import Enum, unique\nfrom urllib.parse import urlparse\n\n\nfrom pandas_profiling.config import config\nfrom pandas_profiling.utils.data_types import str_is_path\n\n\n@unique\nclass Variable(Enum):\n \"\"\"The possible types of variables in the Profiling Report.\"\"\"\n\n TYPE_CAT = \"CAT\"\n \"\"\"A categorical variable\"\"\"\n\n TYPE_BOOL = \"BOOL\"\n \"\"\"A boolean variable\"\"\"\n\n TYPE_NUM = \"NUM\"\n \"\"\"A numeric variable\"\"\"\n\n TYPE_DATE = \"DATE\"\n \"\"\"A date variable\"\"\"\n\n TYPE_URL = \"URL\"\n \"\"\"A URL variable\"\"\"\n\n TYPE_PATH = \"PATH\"\n \"\"\"Absolute files\"\"\"\n\n S_TYPE_CONST = \"CONST\"\n \"\"\"A constant variable\"\"\"\n\n S_TYPE_UNIQUE = \"UNIQUE\"\n \"\"\"An unique variable\"\"\"\n\n S_TYPE_UNSUPPORTED = \"UNSUPPORTED\"\n \"\"\"An unsupported variable\"\"\"\n\n S_TYPE_CORR = \"CORR\"\n \"\"\"A highly correlated variable\"\"\"\n\n S_TYPE_RECODED = \"RECODED\"\n \"\"\"A recorded variable\"\"\"\n\n S_TYPE_REJECTED = \"REJECTED\"\n \"\"\"A rejected variable\"\"\"\n\n\ndef get_counts(series: pd.Series) -> dict:\n \"\"\"Counts the values in a series (with and without NaN, distinct).\n\n Args:\n series: Series for which we want to calculate the values.\n\n Returns:\n A dictionary with the count values (with and without NaN, distinct).\n \"\"\"\n value_counts_with_nan = series.value_counts(dropna=False)\n value_counts_without_nan = (\n value_counts_with_nan.reset_index().dropna().set_index(\"index\").iloc[:, 0]\n )\n\n distinct_count_with_nan = value_counts_with_nan.count()\n distinct_count_without_nan = value_counts_without_nan.count()\n\n # When the inferred type of the index is just \"mixed\" probably the types within the series are tuple, dict,\n # list and so on...\n if value_counts_without_nan.index.inferred_type == \"mixed\":\n raise TypeError(\"Not supported mixed type\")\n\n return {\n \"value_counts_with_nan\": value_counts_with_nan,\n \"value_counts_without_nan\": value_counts_without_nan,\n \"distinct_count_with_nan\": distinct_count_with_nan,\n \"distinct_count_without_nan\": distinct_count_without_nan,\n }\n\n\ndef is_boolean(series: pd.Series, series_description: dict) -> bool:\n \"\"\"Is the series boolean type?\n\n Args:\n series: Series\n series_description: Series description\n\n Returns:\n True is the series is boolean type in the broad sense (e.g. 
including yes/no, NaNs allowed).\n \"\"\"\n keys = series_description[\"value_counts_without_nan\"].keys()\n if pd.api.types.is_bool_dtype(keys):\n return True\n elif (\n series_description[\"distinct_count_without_nan\"] <= 2\n and pd.api.types.is_numeric_dtype(series)\n and series[~series.isnull()].between(0, 1).all()\n ):\n return True\n elif series_description[\"distinct_count_without_nan\"] <= 4:\n unique_values = set([str(value).lower() for value in keys.values])\n accepted_combinations = [\n [\"y\", \"n\"],\n [\"yes\", \"no\"],\n [\"true\", \"false\"],\n [\"t\", \"f\"],\n ]\n\n if len(unique_values) == 2 and any(\n [unique_values == set(bools) for bools in accepted_combinations]\n ):\n return True\n\n return False\n\n\ndef is_numeric(series: pd.Series, series_description: dict) -> bool:\n \"\"\"Is the series numeric type?\n\n Args:\n series: Series\n series_description: Series description\n\n Returns:\n True if the series is of numeric type (NaNs allowed).\n \"\"\"\n return pd.api.types.is_numeric_dtype(series) and series_description[\n \"distinct_count_without_nan\"\n ] >= config[\"low_categorical_threshold\"].get(int)\n\n\ndef is_url(series: pd.Series, series_description: dict) -> bool:\n \"\"\"Is the series url type?\n\n Args:\n series: Series\n series_description: Series description\n\n Returns:\n True if the series is of URL type (NaNs allowed).\n \"\"\"\n if series_description[\"distinct_count_without_nan\"] > 0:\n try:\n result = series[~series.isnull()].astype(str).apply(urlparse)\n return result.apply(lambda x: all([x.scheme, x.netloc, x.path])).all()\n except ValueError:\n return False\n else:\n return False\n\n\ndef is_path(series, series_description) -> bool:\n \"\"\"Is the series path type?\n\n Args:\n series: Series\n series_description: Series description\n\n Returns:\n True if the series consists of paths (NaNs allowed).\n \"\"\"\n if series_description[\"distinct_count_without_nan\"] > 0:\n try:\n result = series[~series.isnull()].astype(str).apply(str_is_path)\n return result.all()\n except ValueError:\n return False\n else:\n return False\n\n\ndef is_date(series) -> bool:\n \"\"\"Is the variable of type datetime?\n\n Args:\n series: Series\n\n Returns:\n True if the variable is of type datetime.\n \"\"\"\n is_date_value = pd.api.types.is_datetime64_dtype(series)\n\n return is_date_value\n\n\ndef get_var_type(series: pd.Series) -> dict:\n \"\"\"Get the variable type of a series.\n\n Args:\n series: Series for which we want to infer the variable type.\n\n Returns:\n A dictionary describing the series, including the inferred variable type.\n \"\"\"\n\n try:\n series_description = get_counts(series)\n\n distinct_count_with_nan = series_description[\"distinct_count_with_nan\"]\n distinct_count_without_nan = series_description[\"distinct_count_without_nan\"]\n\n if distinct_count_with_nan <= 1:\n var_type = Variable.S_TYPE_CONST\n elif is_boolean(series, series_description):\n var_type = Variable.TYPE_BOOL\n elif is_numeric(series, series_description):\n var_type = Variable.TYPE_NUM\n elif is_date(series):\n var_type = Variable.TYPE_DATE\n elif is_url(series, series_description):\n var_type = Variable.TYPE_URL\n elif is_path(series, series_description) and sys.version_info[1] > 5:\n var_type = Variable.TYPE_PATH\n elif distinct_count_without_nan == len(series):\n var_type = Variable.S_TYPE_UNIQUE\n else:\n var_type = Variable.TYPE_CAT\n except TypeError:\n series_description = {}\n var_type = Variable.S_TYPE_UNSUPPORTED\n\n series_description.update({\"type\": var_type})\n\n return series_description\n" ]
[ [ "pandas.api.types.is_bool_dtype", "pandas.api.types.is_datetime64_dtype", "pandas.api.types.is_numeric_dtype" ] ]
pulasthi/twister2
[ "40b73bdf7dcb778d957c3f146baf825a97dceae5" ]
[ "deeplearning/pytorch/src/main/python/examples/twister2worker.py" ]
[ "import numpy as np\nimport mpi4py\nimport torch\n\nmpi4py.rc(initialize=False, finalize=False)\nfrom mpi4py import MPI\n\nMPI.Init()\n\ncomm = MPI.COMM_WORLD\nparent = comm.Get_parent()\nworld_rank = parent.Get_rank()\nworld_size = parent.Get_size()\n\nrecv_data = np.array([0, 0, 0, 0], dtype=\"i\")\nif world_rank == 1:\n parent.Recv([recv_data, MPI.INT], source=0, tag=0)\n\nprint(\"From Slave: \", world_rank, world_size, parent, recv_data)\n#comm.Recv([recv_data, MPI.INT], source=0)\n\ntensor1 = torch.from_numpy(recv_data)\ntensor2 = torch.from_numpy(np.ones(4))\ntensor3 = tensor1 + tensor2\n\nprint(\"Results : \", tensor3)\n\n# comm.free()\nMPI.Finalize()\n" ]
[ [ "numpy.array", "numpy.ones", "torch.from_numpy" ] ]
anairabeze/simfempy
[ "144362956263cb9b81f4bade15664d9cc640f93a" ]
[ "simfempy/tools/analyticalfunction.py" ]
[ "# -*- coding: utf-8 -*-\n\"\"\"\n\n\"\"\"\n\nimport numpy as np\nimport sympy\n\n\n#=================================================================#\nclass AnalyticalFunction():\n \"\"\"\n computes numpy vectorized functions for the function and its dericatives up to two\n for a given expression, derivatives computed with sympy\n \"\"\"\n def __repr__(self):\n return f\"expr={str(self.expr)}\"\n return f\"dim={self.dim} expr={str(self.expr)}\"\n def __call__(self, *x):\n return self.fct(*x)\n def __init__(self, expr, dim=3):\n if expr.find('x0') == -1 and expr.find('x1') == -1 and expr.find('x2') == -1:\n expr = expr.replace('x', 'x0')\n expr = expr.replace('y', 'x1')\n expr = expr.replace('z', 'x2')\n if dim==1 and expr.find('x0') == -1:\n expr = expr.replace('x', 'x0')\n self.dim, self.expr = dim, expr\n symbc = \"\"\n for i in range(dim): symbc += f\"x{i},\"\n symbc = symbc[:-1]\n s = sympy.symbols(symbc)\n # print(f\"{expr=} {symbc=} {s=}\")\n self.fct = np.vectorize(sympy.lambdify(symbc,expr))\n self.fct_x = []\n self.fct_xx = []\n for i in range(dim):\n self.fct_xxxx = np.vectorize(sympy.lambdify(symbc, sympy.diff(expr, s[0], 4)),otypes=[float])\n if dim==1: fx = sympy.diff(expr, s)\n else: fx = sympy.diff(expr, s[i])\n self.fct_x.append(np.vectorize(sympy.lambdify(symbc, fx),otypes=[float]))\n self.fct_xx.append([])\n for j in range(dim):\n if dim == 1: fxx = sympy.diff(fx, s)\n else: fxx = sympy.diff(fx, s[j])\n self.fct_xx[i].append(np.vectorize(sympy.lambdify(symbc, fxx),otypes=[float]))\n def d(self, i, *x):\n return self.fct_x[i](*x)\n def x(self, *x):\n return self.fct_x[0](*x)\n def y(self, *x):\n return self.fct_x[1](*x)\n def z(self, *x):\n return self.fct_x[2](*x)\n def dd(self, i, j, *x):\n return self.fct_xx[i][j](*x)\n def dddd(self, *x):\n return self.fct_xxxx(*x)\n def xx(self, *x):\n return self.fct_xx[0][0](*x)\n def xy(self, *x):\n return self.fct_xx[0][1](*x)\n def xz(self, *x):\n return self.fct_xx[0][2](*x)\n def yy(self, *x):\n return self.fct_xx[1][1](*x)\n def yx(self, *x):\n return self.fct_xx[1][0](*x)\n def yz(self, *x):\n return self.fct_xx[1][2](*x)\n def zz(self, *x):\n return self.fct_xx[2][2](*x)\n def zx(self, *x):\n return self.fct_xx[2][0](*x)\n def zy(self, *x):\n return self.fct_xx[2][1](*x)\n\n#=================================================================#\ndef analyticalSolution(function, dim, ncomp=1, random=True):\n \"\"\"\n defines some analytical functions to be used in validation\n\n returns analytical function (if ncomp==1) or list of analytical functions (if ncomp>1)\n\n parameters:\n function: name of function\n ncomp: size of list\n random: use random coefficients\n \"\"\"\n solexact = []\n def _p(n):\n if random:\n p = (4 * np.random.rand(n) - 2) / 3\n else:\n p = [1.1 * (n - d) for d in range(n)]\n return p\n vars = ['x', 'y', 'z']\n p = _p(ncomp * 2*dim*dim)\n for i in range(ncomp):\n # print(f\"{p=}\")\n fct = '{:3.1f}'.format(p.pop())\n if function == 'Constant': pass\n elif function == 'Linear' or function == 'Quadratic':\n for d in range(dim): fct += \"{:+3.1f}*{:1s}\".format(p.pop(), vars[d])\n if function == 'Quadratic':\n for d in range(dim): fct += \"{:+3.1f}*{:1s}**2\".format(p.pop(), vars[d])\n elif function == 'Sinus':\n for d in range(dim): fct += \"{:+3.1f}*sin({:1s})\".format(p.pop(), vars[d])\n else:\n if ncomp==1: fct = function\n else: fct = function[i]\n solexact.append(AnalyticalFunction(expr=fct))\n if ncomp==1: return solexact[0]\n return solexact\n\n\n# 
------------------------------------------------------------------- #\nif __name__ == '__main__':\n def test1D():\n u = AnalyticalFunction(dim=1, expr='x*x')\n print(\"u(2)\", u(2))\n x = np.meshgrid(np.linspace(0, 2, 3))\n print(\"x\", x, \"\\nu=\", u.expr, \"\\nu(x)\", u(x), \"\\nu.x(x)\", u.x(x), \"\\nu.xx(x)\", u.xx(x))\n def test2D():\n u = AnalyticalFunction(dim=2, expr='x*x*y + y*y')\n print(\"u(2,1)\", u(2,1))\n print(\"u(2,1)\", u(*(2,1)))\n x = np.meshgrid(np.linspace(0, 2, 3),np.linspace(0, 1, 2))\n print(\"x\", x, \"\\nu=\", u.expr, \"\\nu(x)\", u(*x), \"\\nu.x(x)\", u.x(*x), \"\\nu.xx(x)\", u.xx(*x))\n # test2D()\n test1D()\n\n\n\n" ]
[ [ "numpy.linspace", "numpy.random.rand" ] ]
atksh/datasets
[ "814058b31ebd99e418114016d60ab4d6f8f82070" ]
[ "tensorflow_datasets/image/places365_small.py" ]
[ "# coding=utf-8\n# Copyright 2019 The TensorFlow Datasets Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Dataset class for Places365-Standard small(256x256) dataset.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport csv\nimport os\nimport six\nimport six.moves.urllib as urllib\nimport tensorflow as tf\nimport tensorflow_datasets.public_api as tfds\n\n_BASE_URL = \"http://data.csail.mit.edu/places/places365/\"\n_TRAIN_URL = \"train_256_places365standard.tar\"\n_TEST_URL = \"test_256.tar\"\n_VALID_URL = \"val_256.tar\"\n_FILE_ANNOTATION_URL = \"filelist_places365-standard.tar\"\n\n_IMAGE_SHAPE = (256, 256, 3)\n\n_DESCRIPTION = (\n \"The Places365-Standard dataset contains 1.8 million train images from 365\"\n \" scene categories,which are used to train the Places365 CNNs.There are 50\"\n \" images per category in the validation set and 900 images per category in\"\n \" the testing set.\"\n)\n\n_LABELS_FNAME = \"image/categories_places365.txt\"\n\n_CITATION = \"\"\"\\\n\n @article{zhou2017places,\n title={Places: A 10 million Image Database for Scene Recognition},\n author={Zhou, Bolei and Lapedriza, Agata and Khosla, Aditya and Oliva, Aude and Torralba, Antonio},\n journal={IEEE Transactions on Pattern Analysis and Machine Intelligence},\n year={2017},\n publisher={IEEE}\n}\n\n\"\"\"\n\n\nclass Places365Small(tfds.core.GeneratorBasedBuilder):\n \"\"\"Places365 Images dataset.\"\"\"\n\n VERSION = tfds.core.Version(\"2.0.0\")\n\n def _info(self):\n names_file = tfds.core.get_tfds_path(_LABELS_FNAME)\n return tfds.core.DatasetInfo(\n builder=self,\n description=(_DESCRIPTION),\n features=tfds.features.FeaturesDict(\n {\n \"image\": tfds.features.Image(shape=_IMAGE_SHAPE),\n \"label\": tfds.features.ClassLabel(names_file=names_file),\n }\n ),\n supervised_keys=(\"image\", \"label\"),\n homepage=\"http://places2.csail.mit.edu/\",\n citation=_CITATION,\n )\n\n def _split_generators(self, dl_manager):\n output_files = dl_manager.download_and_extract(\n {\n \"train\": urllib.parse.urljoin(_BASE_URL, _TRAIN_URL),\n \"test\": urllib.parse.urljoin(_BASE_URL, _TEST_URL),\n \"validation\": urllib.parse.urljoin(_BASE_URL, _VALID_URL),\n \"annotation\": urllib.parse.urljoin(_BASE_URL, _FILE_ANNOTATION_URL),\n }\n )\n\n return [\n tfds.core.SplitGenerator(\n name=\"train\",\n gen_kwargs={\n \"data_dir_path\": os.path.join(output_files[\"train\"], \"data_256\"),\n \"annotation_path\": os.path.join(\n output_files[\"annotation\"], \"places365_train_standard.txt\"\n ),\n \"split_name\": \"train\",\n },\n ),\n tfds.core.SplitGenerator(\n name=\"test\",\n gen_kwargs={\n \"data_dir_path\": os.path.join(output_files[\"test\"], \"test_256\"),\n \"annotation_path\": os.path.join(\n output_files[\"annotation\"], \"places365_test.txt\"\n ),\n \"split_name\": \"test\",\n },\n ),\n tfds.core.SplitGenerator(\n name=\"validation\",\n gen_kwargs={\n \"data_dir_path\": os.path.join(\n output_files[\"validation\"], 
\"val_256\"\n ),\n \"annotation_path\": os.path.join(\n output_files[\"annotation\"], \"places365_val.txt\"\n ),\n \"split_name\": \"validation\",\n },\n ),\n ]\n\n def _generate_examples(self, data_dir_path, split_name, annotation_path):\n with tf.io.gfile.GFile(annotation_path) as f:\n if split_name == \"test\":\n # test split doesn't have labels assigned.\n file_to_class = {x[0]: -1 for x in csv.reader(f, delimiter=\" \")}\n else:\n file_to_class = {x[0]: int(x[1]) for x in csv.reader(f, delimiter=\" \")}\n\n for filepath, class_id in six.iteritems(file_to_class):\n yield filepath, {\n # it is a \"+\" instead of os.path.join on purpose.\n # as some annotation file entries contain paths starting with \"/\"\n \"image\": os.path.normpath(data_dir_path + \"/\" + filepath),\n \"label\": class_id,\n }\n" ]
[ [ "tensorflow.io.gfile.GFile" ] ]