{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"toc": true
},
"source": [
"<h1>Table of Contents<span class=\"tocSkip\"></span></h1>\n",
"<div class=\"toc\"><ul class=\"toc-item\"><li><span><a href=\"#Coding-Battleship\" data-toc-modified-id=\"Coding-Battleship-1\"><span class=\"toc-item-num\">1&nbsp;&nbsp;</span>Coding Battleship</a></span></li><li><span><a href=\"#Callback-and-Plotting\" data-toc-modified-id=\"Callback-and-Plotting-2\"><span class=\"toc-item-num\">2&nbsp;&nbsp;</span>Callback and Plotting</a></span></li><li><span><a href=\"#Playing-with-One-Ship-on-a-5x5-board\" data-toc-modified-id=\"Playing-with-One-Ship-on-a-5x5-board-3\"><span class=\"toc-item-num\">3&nbsp;&nbsp;</span>Playing with One Ship on a 5x5 board</a></span></li><li><span><a href=\"#Playing-with-One-Ship-on-a-Bigger-Board\" data-toc-modified-id=\"Playing-with-One-Ship-on-a-Bigger-Board-4\"><span class=\"toc-item-num\">4&nbsp;&nbsp;</span>Playing with One Ship on a Bigger Board</a></span></li><li><span><a href=\"#Visualizing-How-the-Agent-Plays\" data-toc-modified-id=\"Visualizing-How-the-Agent-Plays-5\"><span class=\"toc-item-num\">5&nbsp;&nbsp;</span>Visualizing How the Agent Plays</a></span></li><li><span><a href=\"#Optimizing-The-Algorithm-Parameters-with-Hyperopt\" data-toc-modified-id=\"Optimizing-The-Algorithm-Parameters-with-Hyperopt-6\"><span class=\"toc-item-num\">6&nbsp;&nbsp;</span>Optimizing The Algorithm Parameters with Hyperopt</a></span></li><li><span><a href=\"#Links\" data-toc-modified-id=\"Links-7\"><span class=\"toc-item-num\">7&nbsp;&nbsp;</span>Links</a></span></li><li><span><a href=\"#Reward-scheme\" data-toc-modified-id=\"Reward-scheme-8\"><span class=\"toc-item-num\">8&nbsp;&nbsp;</span>Reward scheme</a></span></li><li><span><a href=\"#Skeleton-Battleship-Environmnt\" data-toc-modified-id=\"Skeleton-Battleship-Environmnt-9\"><span class=\"toc-item-num\">9&nbsp;&nbsp;</span>Skeleton Battleship Environmnt</a></span></li></ul></div>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Coding Battleship"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import gym\n",
"from gym import spaces\n",
"import numpy as np"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"# randomly places a ship on a board\n",
"def set_ship(ship, ships, board, ship_locs):\n",
"\n",
" grid_size = board.shape[0]\n",
" \n",
" done = False\n",
" while not done:\n",
" init_pos_i = np.random.randint(0, grid_size)\n",
" init_pos_j = np.random.randint(0, grid_size)\n",
" \n",
" # for a cruiser, if init_oos_i = 0, move forward horizontally (+1)\n",
" # for a cruiser, if init_oos_j = 0, move downward vertically (+1)\n",
" move_j = grid_size - init_pos_j - ships[ship]# horizontal\n",
" if move_j > 0:\n",
" move_j = 1\n",
" else:\n",
" move_j = -1\n",
" move_i = grid_size - init_pos_i - ships[ship] # vertical\n",
" if move_i > 0:\n",
" move_i = 1\n",
" else:\n",
" move_i = -1\n",
" # choose if placing ship horizontally or vertically\n",
" choice_hv = np.random.choice(['h', 'v']) # horizontal, vertical\n",
" if choice_hv == 'h': #horizontal\n",
" j = [(init_pos_j + move_j*jj) for jj in range(ships[ship])]\n",
" i = [init_pos_i for ii in range(ships[ship])]\n",
" pos = set(zip(i,j)) \n",
" if all([board[i,j]==0 for (i,j) in pos]):\n",
" done = True\n",
" elif choice_hv == 'v':\n",
" i = [(init_pos_i + move_i*ii) for ii in range(ships[ship])]\n",
" j = [init_pos_j for jj in range(ships[ship])]\n",
" pos = set(zip(i,j)) \n",
" #check if empty board in this direction\n",
" if all([board[i,j]==0 for (i,j) in pos]):\n",
" done = True\n",
" # set ship - see convention\n",
" for (i,j) in pos:\n",
" board[i,j] = 1\n",
" ship_locs[ship].append((i,j))\n",
" \n",
" return board, ship_locs\n",
"\n",
"def board_rendering(grid_size, board):\n",
" for i in range(grid_size):\n",
" print(\"-\"*(4*grid_size+2))\n",
" for j in range(grid_size):\n",
" current_state_value = board[i,j]\n",
" current_state = ('S' if current_state_value==1 else ' ')\n",
" print(\" | \", end=\"\")\n",
" print(current_state, end='')\n",
" print(' |')\n",
" print(\"-\"*(4*grid_size+2))"
]
},
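{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check of `set_ship` and `board_rendering` (an illustrative sketch, not part of the original run; `ships_demo`, `demo_board`, and `demo_locs` are hypothetical names)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# minimal sketch: place one 3-cell cruiser on an empty 5x5 board and render it\n",
"ships_demo = {'cruiser': 3}\n",
"demo_board = np.zeros((5, 5), dtype='int')\n",
"demo_locs = {'cruiser': []}\n",
"demo_board, demo_locs = set_ship('cruiser', ships_demo, demo_board, demo_locs)\n",
"board_rendering(5, demo_board)\n",
"print(demo_locs)"
]
},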
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"class BattleshipEnv(gym.Env):\n",
" \n",
" \"\"\"Custom Environment that follows gym interface\"\"\"\n",
" \"\"\"see https://github.com/openai/gym/blob/master/gym/core.py\"\"\"\n",
" \n",
" metadata = {'render.modes': ['human']} \n",
"\n",
"\n",
" def __init__(self, enemy_board, ship_locs, grid_size, ships):\n",
" \n",
" super(BattleshipEnv, self).__init__()\n",
" \n",
" #ships\n",
" self.ships = ships\n",
" \n",
" # board size\n",
" self.grid_size = grid_size \n",
" # cell state encoding (empty, hit, miss)\n",
" self.cell = {'E': 0, 'X': 1, 'O': -1} \n",
" # boards, actions, rewards\n",
" self.board = self.cell['E']*np.ones((self.grid_size, self.grid_size), dtype='int')\n",
" # enemy_board must be encoded with 0: empy and 1: ship cell\n",
" self.is_enemy_set = False\n",
" self.enemy_board = enemy_board\n",
" self.ship_locs = ship_locs\n",
" if self.enemy_board is None:\n",
" self.enemy_board = 0*np.ones((self.grid_size, self.grid_size), dtype='int')\n",
" for ship in self.ships:\n",
" self.ship_locs[ship] = []\n",
" self.enemy_board, self.ship_locs = set_ship(ship, self.ships, self.enemy_board, self.ship_locs)\n",
" self.is_enemy_set = True\n",
" # reward discount\n",
" self.rdisc = 0\n",
" self.legal_actions = [] # legal (empty) cells available for moves\n",
" for i in range(self.grid_size):\n",
" for j in range(self.grid_size):\n",
" self.legal_actions.append((i,j))# this gets updated as an action is performed\n",
" \n",
" # Define action and observation space\n",
" # They must be gym.spaces objects\n",
" # In our case the action space is discrete: index of action\n",
" self.action_space = spaces.Discrete(self.grid_size * self.grid_size)\n",
" # The observation will be the state or configuration of the board\n",
" self.observation_space = spaces.Box(low=-1, high=1,shape=(self.grid_size, self.grid_size), \n",
" dtype=np.int)\n",
" #Ex: print(spaces.Box(0,1, shape=(10,10)).high)\n",
"\n",
" # action will be an index in action_space if from epsilon-greedy\n",
" # or from model prediction\n",
" def step(self, action):\n",
" \n",
" # board situation before the action\n",
" state = self.board.copy() \n",
" empty_cnts_pre, hit_cnts_pre, miss_cnts_pre = self.board_config(state)\n",
" \n",
" # action coordinates generated or predicted by the agent in the action_space\n",
" i, j = np.unravel_index(action, (self.grid_size,self.grid_size))\n",
" \n",
" #print('action', action, 'coords', i, j)\n",
" #print('legal_actions', self.legal_actions)\n",
" \n",
" # lose 1 point for any action\n",
" reward = -1\n",
" # assign a penalty for each illegal action used instead of a legal one\n",
" if (i,j) not in self.legal_actions:\n",
" reward -= 2*self.grid_size\n",
" action_idx = np.random.randint(0,len(self.legal_actions))\n",
" \n",
" i,j = self.legal_actions[action_idx] \n",
" action = np.ravel_multi_index((i,j), (self.grid_size,self.grid_size))\n",
" \n",
" # set new state after performing action (scoring board is updated)\n",
" self.set_state((i,j))\n",
" # update legal actions and action_space\n",
" self.set_legal_actions((i,j))\n",
"\n",
" # new state on scoring board - this includes last action\n",
" next_state = self.board\n",
" \n",
" # board situation after action\n",
" empty_cnts_post, hit_cnts_post, miss_cnts_post = self.board_config(next_state)\n",
"\n",
" # game completed?\n",
" done = bool(hit_cnts_post == sum(self.ships.values()))\n",
" \n",
" # reward for a hit\n",
" if hit_cnts_post-hit_cnts_pre==1: \n",
" # Update hit counts and use it to reward\n",
" r_discount = 1#0.5**self.rdisc\n",
" rp = (self.grid_size*self.grid_size if done else self.grid_size)\n",
" reward += rp*r_discount\n",
" #print('HIT!!!')\n",
" \n",
" #if done:\n",
" # print('done')\n",
" \n",
" # we discount the reward for a subsequent hit the longer it takes to score it\n",
" # after a hit, zero the discount \n",
" # don't start discounting though, if first hit hasn't happened yet\n",
" #if hit_cnts_post-hit_cnts_pre==1 or hit_cnts_pre==0:\n",
" # self.rdisc = 0\n",
" #else:\n",
" # self.rdisc += 1\n",
" \n",
" reward = float(reward)\n",
" \n",
" #print('reward:', reward)\n",
" # store the current value of the portfolio here\n",
" info = {}\n",
"\n",
" return next_state, reward, done, info\n",
"\n",
"\n",
" \n",
" def reset(self):\n",
" # Reset the state of the environment to an initial state\n",
" \"\"\"\n",
" Important: the observation must be a numpy array\n",
" :return: (np.array) \n",
" \"\"\"\n",
" \n",
" self.board = self.cell['E']*np.ones((self.grid_size, self.grid_size), dtype='int')\n",
" \n",
" self.legal_actions = [] # legal (empty) cells available for moves\n",
" for i in range(self.grid_size):\n",
" for j in range(self.grid_size):\n",
" self.legal_actions.append((i,j))# this gets updated as an action is performed\n",
" \n",
" # generate a random board again if it was set randomly before\n",
" if self.is_enemy_set:\n",
" self.enemy_board = 0*np.ones((self.grid_size, self.grid_size), dtype='int')\n",
" self.ship_locs = {}\n",
" for ship in self.ships:\n",
" self.ship_locs[ship] = []\n",
" self.enemy_board, self.ship_locs = set_ship(ship, self.ships, self.enemy_board, self.ship_locs)\n",
"\n",
" self.rdisc = 0\n",
"\n",
" return self.board\n",
" \n",
" # Render the environment to the screen\n",
" # board (i,j)\n",
" ## ------------>j\n",
" ## | (0,0) | (0,1) | (0,2) | |\n",
" ## | (1,0) | (1,1) | (1,2) | |\n",
" ## v i\n",
" def render(self, mode='human'):\n",
" for i in range(self.grid_size):\n",
" print(\"-\"*(4*self.grid_size+2))\n",
" for j in range(self.grid_size):\n",
" current_state_value = self.board[i,j]\n",
" current_state = list(self.cell.keys())[list(self.cell.values()).index(current_state_value)]\n",
" current_state = (current_state if current_state!='E' else ' ')\n",
" print(\" | \", end=\"\")\n",
" print(current_state, end='')\n",
" print(' |')\n",
" print(\"-\"*(4*self.grid_size+2))\n",
" \n",
" ####### HELPER FUNCTIONS ###########\n",
" \n",
" def board_config(self, state):\n",
" uni_states, uni_cnts = np.unique(state.ravel(), return_counts=True)\n",
" empty_cnts = uni_cnts[uni_states==self.cell['E']]\n",
" hit_cnts = uni_cnts[uni_states==self.cell['X']]\n",
" miss_cnts = uni_cnts[uni_states==self.cell['O']]\n",
" if len(empty_cnts)==0:\n",
" empty_cnts = 0\n",
" else:\n",
" empty_cnts = empty_cnts[0]\n",
" if len(hit_cnts)==0:\n",
" hit_cnts = 0\n",
" else:\n",
" hit_cnts = hit_cnts[0]\n",
" if len(miss_cnts)==0:\n",
" miss_cnts = 0\n",
" else:\n",
" miss_cnts = miss_cnts[0]\n",
" \n",
" return empty_cnts, hit_cnts, miss_cnts\n",
"\n",
" # set board configuration and state value after player action\n",
" def set_state(self, action):\n",
" i , j = action\n",
" if self.enemy_board[i,j]==1:\n",
" self.board[i,j]=self.cell['X']\n",
" else:\n",
" self.board[i,j]=self.cell['O']\n",
"\n",
" # set legal actions (empty board locations)\n",
" def set_legal_actions(self, action):\n",
" if action in self.legal_actions:\n",
" self.legal_actions.remove(action)\n",
" \n"
]
},
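{
"cell_type": "markdown",
"metadata": {},
"source": [
"Reward arithmetic in `step` (as coded above, for a 5x5 board): every shot costs -1; a shot outside `legal_actions` adds a penalty of -2*grid_size = -10 and is redirected to a random legal cell; a hit adds +grid_size = +5, and the hit that uncovers the last ship cell adds +grid_size^2 = +25 instead. So a clean hit scores 4, an illegal shot redirected to a miss scores -11, and an illegal shot redirected to the winning hit scores -1 - 10 + 25 = 14, which matches the episode logs further down. The cell below is an illustrative check (not part of the original run) of the action encoding, with `gs` a hypothetical grid size."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# a Discrete(grid_size*grid_size) action index maps to (row, col) via\n",
"# np.unravel_index and back via np.ravel_multi_index, exactly as step() does\n",
"gs = 5\n",
"for a in [0, 7, 24]:\n",
"    i, j = np.unravel_index(a, (gs, gs))\n",
"    assert np.ravel_multi_index((i, j), (gs, gs)) == a\n",
"    print(a, '->', (i, j))"
]
},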
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:\n",
"The TensorFlow contrib module will not be included in TensorFlow 2.0.\n",
"For more information, please see:\n",
" * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n",
" * https://github.com/tensorflow/addons\n",
" * https://github.com/tensorflow/io (for I/O related ops)\n",
"If you depend on functionality not listed there, please file an issue.\n",
"\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/env_checker.py:202: UserWarning: Your observation has an unconventional shape (neither an image, nor a 1D vector). We recommend you to flatten the observation to have only a 1D vector\n",
" warnings.warn(\"Your observation has an unconventional shape (neither an image, nor a 1D vector). \"\n"
]
}
],
"source": [
"# validate environment with one ship (either random or user-defined) on 5x5 board\n",
"\n",
"from stable_baselines.common.env_checker import check_env\n",
"\n",
"# ships\n",
"ships = {}\n",
"ships['cruiser'] = 3\n",
"\n",
"grid_size = 5\n",
"# for pre-determined board\n",
"enemy_board = 0*np.ones((grid_size, grid_size), dtype='int')\n",
"enemy_board[0,1] = 1\n",
"enemy_board[1,1] = 1\n",
"enemy_board[2,1] = 1\n",
"ship_locs = {}\n",
"ship_locs['cruiser'] = [(0,1),(1,1),(2,1)]\n",
"env = BattleshipEnv(enemy_board=enemy_board, ship_locs=ship_locs, grid_size=grid_size, ships=ships)\n",
"# for random board\n",
"#env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size)\n",
"# If the environment doesn't follow the interface, an error will be thrown\n",
"check_env(env, warn=True)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(array([[0, 1, 0, 0, 0],\n",
" [0, 1, 0, 0, 0],\n",
" [0, 1, 0, 0, 0],\n",
" [0, 0, 0, 0, 0],\n",
" [0, 0, 0, 0, 0]]), {'cruiser': [(0, 1), (1, 1), (2, 1)]})"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"env.enemy_board, env.ship_locs"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Episode 0\n",
"Action 1 4 2\n",
"obs= [[ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 -1 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
"Action 2 1 0\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 -1 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
"Action 3 4 2\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 -1 0]\n",
" [ 0 0 -1 0 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | O | |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
"Action 4 3 3\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 0]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 0 0 -1 0]\n",
" [ 0 0 -1 0 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | | | O | |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
"Action 5 1 4\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 0 0 -1 0]\n",
" [ 0 0 -1 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | | | O | |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
"Action 6 2 2\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 1 0 -1 0]\n",
" [ 0 0 -1 0 0]] reward= -6.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | X | | O | |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
"Action 7 1 0\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 1 0 -1 0]\n",
" [ 0 -1 -1 0 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | X | | O | |\n",
"----------------------\n",
" | | O | O | | |\n",
"----------------------\n",
"Action 8 4 3\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 1 0 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | X | | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 9 1 4\n",
"obs= [[ 0 0 -1 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 1 0 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | X | | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 10 4 2\n",
"obs= [[ 0 0 -1 0 0]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 1 0 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | X | | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 11 0 2\n",
"obs= [[ 0 0 -1 0 0]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 -1 0 0]\n",
" [-1 1 0 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | O | X | | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 12 0 3\n",
"obs= [[ 0 0 -1 -1 0]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 -1 0 0]\n",
" [-1 1 0 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | O | X | | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 13 1 4\n",
"obs= [[ 0 0 -1 -1 0]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 -1 0 0]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 14 2 0\n",
"obs= [[ 0 0 -1 -1 0]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 -1 0 0]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | O | | |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 15 2 2\n",
"obs= [[ 0 -1 -1 -1 0]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 -1 0 0]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | O | | |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 16 0 0\n",
"obs= [[-1 -1 -1 -1 0]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 -1 0 0]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | O | O | O | O | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | O | | |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 17 0 3\n",
"obs= [[-1 -1 -1 -1 -1]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 -1 0 0]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | O | O | O | O | O |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | O | | |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 18 2 4\n",
"obs= [[-1 -1 -1 -1 -1]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 -1 0 -1]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | O | O | O | O | O |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | O | | O |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | |\n",
"----------------------\n",
"Action 19 4 1\n",
"obs= [[-1 -1 -1 -1 -1]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 -1 0 -1]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 -1]] reward= -11.0 done= False\n",
"----------------------\n",
" | O | O | O | O | O |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | O | | O |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | O |\n",
"----------------------\n",
"Action 20 4 4\n",
"obs= [[-1 -1 -1 -1 -1]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 -1]] reward= -11.0 done= False\n",
"----------------------\n",
" | O | O | O | O | O |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | O |\n",
"----------------------\n",
"Action 21 1 2\n",
"obs= [[-1 -1 -1 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 -1]] reward= -1.0 done= False\n",
"----------------------\n",
" | O | O | O | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | O |\n",
"----------------------\n",
"Action 22 1 2\n",
"obs= [[-1 -1 -1 -1 -1]\n",
" [-1 1 -1 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 -1]] reward= -6.0 done= False\n",
"----------------------\n",
" | O | O | O | O | O |\n",
"----------------------\n",
" | O | X | O | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | O |\n",
"----------------------\n",
"Action 23 2 3\n",
"obs= [[-1 -1 -1 -1 -1]\n",
" [-1 1 -1 -1 -1]\n",
" [-1 1 -1 -1 -1]\n",
" [-1 1 -1 -1 0]\n",
" [ 0 -1 -1 -1 -1]] reward= 14.0 done= True\n",
"----------------------\n",
" | O | O | O | O | O |\n",
"----------------------\n",
" | O | X | O | O | O |\n",
"----------------------\n",
" | O | X | O | O | O |\n",
"----------------------\n",
" | O | X | O | O | |\n",
"----------------------\n",
" | | O | O | O | O |\n",
"----------------------\n",
"Goal reached! reward= 14.0\n",
"Episode 1\n",
"Action 1 1 0\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 2 1 0\n",
"obs= [[ 0 0 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 3 0 1\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 4 2 4\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 0 0 -1]\n",
" [ 0 0 0 0 0]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | | | O |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 5 3 2\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 0 0 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | | | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 6 2 3\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 0 0 0 -1]\n",
" [ 0 0 0 -1 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | | | | O |\n",
"----------------------\n",
" | | | | O | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 7 1 3\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 0 -1 -1]\n",
" [ 0 0 -1 0 0]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | | O | O |\n",
"----------------------\n",
" | | | O | | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 8 3 3\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 0 -1 -1]\n",
" [ 0 0 -1 -1 0]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | | O | O |\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 9 0 1\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 0 0 -1 -1]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 -1 -1 0]\n",
" [ 0 0 0 0 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 10 1 1\n",
"obs= [[ 0 -1 0 0 0]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 -1 -1 0]\n",
" [ 0 0 0 0 0]] reward= 4.0 done= False\n",
"----------------------\n",
" | | O | | | |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 11 1 1\n",
"obs= [[ 0 -1 0 0 -1]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 0 -1 -1]\n",
" [ 0 0 -1 -1 0]\n",
" [ 0 0 0 0 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | O | | | O |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | | O | O |\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 12 0 1\n",
"obs= [[ 0 -1 0 0 -1]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [ 0 0 -1 -1 0]\n",
" [ 0 0 0 0 0]] reward= -11.0 done= False\n",
"----------------------\n",
" | | O | | | O |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | | | O | O | |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 13 3 4\n",
"obs= [[ 0 -1 0 0 -1]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [ 0 0 -1 -1 -1]\n",
" [ 0 0 0 0 0]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | O |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | | | O | O | O |\n",
"----------------------\n",
" | | | | | |\n",
"----------------------\n",
"Action 14 4 4\n",
"obs= [[ 0 -1 0 0 -1]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [ 0 0 -1 -1 -1]\n",
" [ 0 0 0 0 -1]] reward= -1.0 done= False\n",
"----------------------\n",
" | | O | | | O |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | | | O | O | O |\n",
"----------------------\n",
" | | | | | O |\n",
"----------------------\n",
"Action 15 3 1\n",
"obs= [[ 0 -1 0 0 -1]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [ 0 1 -1 -1 -1]\n",
" [ 0 0 0 0 -1]] reward= 4.0 done= False\n",
"----------------------\n",
" | | O | | | O |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | | X | O | O | O |\n",
"----------------------\n",
" | | | | | O |\n",
"----------------------\n",
"Action 16 1 3\n",
"obs= [[ 0 -1 -1 0 -1]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [ 0 1 -1 -1 -1]\n",
" [ 0 0 0 0 -1]] reward= -11.0 done= False\n",
"----------------------\n",
" | | O | O | | O |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | | X | O | O | O |\n",
"----------------------\n",
" | | | | | O |\n",
"----------------------\n",
"Action 17 1 3\n",
"obs= [[ 0 -1 -1 0 -1]\n",
" [-1 1 0 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [ 0 1 -1 -1 -1]\n",
" [ 0 0 0 -1 -1]] reward= -11.0 done= False\n",
"----------------------\n",
" | | O | O | | O |\n",
"----------------------\n",
" | O | X | | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | | X | O | O | O |\n",
"----------------------\n",
" | | | | O | O |\n",
"----------------------\n",
"Action 18 3 1\n",
"obs= [[ 0 -1 -1 0 -1]\n",
" [-1 1 -1 -1 -1]\n",
" [-1 0 -1 -1 -1]\n",
" [ 0 1 -1 -1 -1]\n",
" [ 0 0 0 -1 -1]] reward= -11.0 done= False\n",
"----------------------\n",
" | | O | O | | O |\n",
"----------------------\n",
" | O | X | O | O | O |\n",
"----------------------\n",
" | O | | O | O | O |\n",
"----------------------\n",
" | | X | O | O | O |\n",
"----------------------\n",
" | | | | O | O |\n",
"----------------------\n",
"Action 19 1 2\n",
"obs= [[ 0 -1 -1 0 -1]\n",
" [-1 1 -1 -1 -1]\n",
" [-1 1 -1 -1 -1]\n",
" [ 0 1 -1 -1 -1]\n",
" [ 0 0 0 -1 -1]] reward= 14.0 done= True\n",
"----------------------\n",
" | | O | O | | O |\n",
"----------------------\n",
" | O | X | O | O | O |\n",
"----------------------\n",
" | O | X | O | O | O |\n",
"----------------------\n",
" | | X | O | O | O |\n",
"----------------------\n",
" | | | | O | O |\n",
"----------------------\n",
"Goal reached! reward= 14.0\n"
]
}
],
"source": [
"# Test environment\n",
"# ships\n",
"ships = {}\n",
"ships['cruiser'] = 3\n",
"\n",
"grid_size=5\n",
"env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size, ships=ships)\n",
"\n",
"for ep in range(2):\n",
" print('Episode', ep)\n",
" obs = env.reset()\n",
" #env.render()\n",
" #print(env.enemy_board)\n",
" done = False\n",
" t = 0\n",
" while not done:\n",
" action = env.action_space.sample()\n",
" i, j = np.unravel_index(action, (grid_size,grid_size)) \n",
" print(\"Action {}\".format(t + 1), i, j)\n",
" obs, reward, done, _ = env.step(action)\n",
" print('obs=', obs, 'reward=', reward, 'done=', done)\n",
" env.render()\n",
" t += 1\n",
" if done:\n",
" print(\"Goal reached!\", \"reward=\", reward)\n"
]
},
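{
"cell_type": "markdown",
"metadata": {},
"source": [
"A rough baseline (an illustrative sketch, not part of the original run): mean episode length under the random policy above, for comparison with the trained agent later; `n_episodes` is a hypothetical sample size."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"n_episodes = 100  # hypothetical sample size\n",
"moves = []\n",
"for _ in range(n_episodes):\n",
"    obs = env.reset()\n",
"    done, t = False, 0\n",
"    while not done:\n",
"        obs, reward, done, _ = env.step(env.action_space.sample())\n",
"        t += 1\n",
"    moves.append(t)\n",
"print('mean moves per episode (random policy):', np.mean(moves))"
]
},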
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Callback and Plotting"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"from stable_baselines import DQN, PPO2, A2C, ACKTR, TRPO\n",
"from stable_baselines.bench import Monitor\n",
"from stable_baselines.common.vec_env import DummyVecEnv\n",
"import os\n",
"\n",
"from stable_baselines.results_plotter import load_results, ts2xy\n",
"\n",
"from tensorflow.keras.backend import clear_session #not sure if we need this but it does not hurt\n",
"\n",
"import matplotlib.pyplot as plt"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"## This callback function is legacy and needs to be replaced with object oriented functions\n",
"## to work with all policies. See next callback function\n",
"\n",
"def callback(_locals, _globals):\n",
" \"\"\"\n",
" Callback called at each step (for DQN an others) or after n steps (see ACER or PPO2)\n",
" :param _locals: (dict)\n",
" :param _globals: (dict)\n",
" \"\"\"\n",
" global n_steps, best_mean_reward\n",
" # Print stats every step_interval calls\n",
" if (n_steps + 1) % step_interval == 0:\n",
" # Evaluate policy training performance\n",
" x, y = ts2xy(load_results(log_dir), 'timesteps')\n",
" if len(x) > 0:\n",
" # NOTE: when done is True, timesteps are counted and reported to the log_dir\n",
" mean_reward = np.mean(y[-episode_interval:]) # mean reward over previous episode_interval episodes\n",
" mean_moves = np.mean(np.diff(x[-episode_interval:])) # mean moves over previous episode_interval episodes\n",
" print(x[-1], 'timesteps') # closest to step_interval step number\n",
" print(\"Best mean reward: {:.2f} - Last mean reward per episode: {:.2f} - Last mean moves per episode: {:.2f}\".format(best_mean_reward, \n",
" mean_reward, mean_moves))\n",
"\n",
" # New best model, you could save the agent here\n",
" if mean_reward > best_mean_reward:\n",
" best_mean_reward = mean_reward\n",
" # Example for saving best model\n",
" print(\"Saving new best model\")\n",
" _locals['self'].save(log_dir + 'best_model.pkl')\n",
" n_steps += 1\n",
" # Returning False will stop training early\n",
" return True"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"from stable_baselines.common.callbacks import BaseCallback\n",
"\n",
"class SaveOnBestTrainingRewardCallback(BaseCallback):\n",
" \"\"\"\n",
" Callback for saving a model (the check is done every ``check_freq`` steps)\n",
" based on the training reward (in practice, we recommend using ``EvalCallback``).\n",
"\n",
" :param check_freq: (int)\n",
" :param log_dir: (str) Path to the folder where the model will be saved.\n",
" It must contains the file created by the ``Monitor`` wrapper.\n",
" :param verbose: (int)\n",
" \"\"\"\n",
" def __init__(self, check_freq: int, episode_interval: int, log_dir: str, verbose=1):\n",
" super(SaveOnBestTrainingRewardCallback, self).__init__(verbose)\n",
" self.check_freq = check_freq\n",
" self.episode_interval = episode_interval\n",
" self.log_dir = log_dir\n",
" self.save_path = os.path.join(log_dir, 'best_model.pkl')\n",
" self.best_mean_reward = -np.inf\n",
"\n",
" def _init_callback(self) -> None:\n",
" # Create folder if needed\n",
" if self.save_path is not None:\n",
" os.makedirs(self.save_path, exist_ok=True)\n",
"\n",
" def _on_step(self) -> bool:\n",
" if self.n_calls % self.check_freq == 0:\n",
" # Evaluate policy training performance\n",
" x, y = ts2xy(load_results(self.log_dir), 'timesteps')\n",
" if len(x) > 0:\n",
" # NOTE: when done is True, timesteps are counted and reported to the log_dir\n",
" mean_reward = np.mean(y[-self.episode_interval:]) # mean reward over previous episode_interval episodes\n",
" mean_moves = np.mean(np.diff(x[-self.episode_interval:])) # mean moves over previous 100 episodes\n",
" if self.verbose > 0:\n",
" print(x[-1], 'timesteps') # closest to step_interval step number\n",
" print(\"Best mean reward: {:.2f} - Last mean reward per episode: {:.2f} - Last mean moves per episode: {:.2f}\".format(self.best_mean_reward, \n",
" mean_reward, mean_moves))\n",
"\n",
" # New best model, you could save the agent here\n",
" if mean_reward > self.best_mean_reward:\n",
" self.best_mean_reward = mean_reward\n",
" # Example for saving best model\n",
" if self.verbose > 0:\n",
" print(\"Saving new best model\")\n",
" self.model.save(self.save_path)\n",
"\n",
" return True"
]
},
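{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of wiring the object-oriented callback (assuming the `./gym/` log directory created in the training cell below): instantiate it and pass `callback=best_cb` to `model.learn(...)` in place of the legacy function."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# the constructor only stores settings; folders are touched in _init_callback\n",
"# when learning starts, so this cell is safe to run on its own\n",
"best_cb = SaveOnBestTrainingRewardCallback(check_freq=10000, episode_interval=10000,\n",
"                                           log_dir='./gym/')\n",
"# model.learn(total_timesteps=num_timesteps, callback=best_cb)"
]
},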
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"def moving_average(values, window):\n",
" \"\"\"\n",
" Smooth values by doing a moving average\n",
" :param values: (numpy array)\n",
" :param window: (int)\n",
" :return: (numpy array)\n",
" \"\"\"\n",
" weights = np.repeat(1.0, window) / window\n",
" return np.convolve(values, weights, 'valid')\n",
"\n",
"\n",
"def plot_results(log_folder, window = 100, title='Learning Curve'):\n",
" \"\"\"\n",
" plot the results\n",
"\n",
" :param log_folder: (str) the save location of the results to plot\n",
" :param title: (str) the title of the task to plot\n",
" \"\"\"\n",
" \n",
" x, y = ts2xy(load_results(log_folder), 'timesteps')\n",
" y = moving_average(y, window=window)\n",
" y_moves = moving_average(np.diff(x), window = window) \n",
" # Truncate x\n",
" x = x[len(x) - len(y):]\n",
" x_moves = x[len(x) - len(y_moves):]\n",
"\n",
" title = 'Smoothed Learning Curve of Rewards (every ' + str(window) +' steps)'\n",
" fig = plt.figure(title)\n",
" plt.plot(x, y)\n",
" plt.xlabel('Number of Timesteps')\n",
" plt.ylabel('Rewards')\n",
" plt.title(title)\n",
" plt.show()\n",
"\n",
" title = 'Smoothed Learning Curve of Moves (every ' + str(window) +' steps)'\n",
" fig = plt.figure(title)\n",
" plt.plot(x_moves, y_moves)\n",
" plt.xlabel('Number of Timesteps')\n",
" plt.ylabel('Moves')\n",
" plt.title(title)\n",
" plt.show()"
]
},
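{
"cell_type": "markdown",
"metadata": {},
"source": [
"A tiny illustrative check of `moving_average`: with mode `'valid'`, `np.convolve` drops the first `window - 1` points, which is why `plot_results` truncates `x` to match the smoothed series."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(moving_average(np.arange(5, dtype=float), window=3))  # -> [1. 2. 3.]"
]
},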
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Playing with One Ship on a 5x5 board"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/tf_util.py:191: The name tf.ConfigProto is deprecated. Please use tf.compat.v1.ConfigProto instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/tf_util.py:200: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/policies.py:116: The name tf.variable_scope is deprecated. Please use tf.compat.v1.variable_scope instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/input.py:25: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/policies.py:561: flatten (from tensorflow.python.layers.core) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Use keras.layers.flatten instead.\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/tensorflow_core/python/layers/core.py:332: Layer.apply (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use `layer.__call__` method instead.\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/tf_layers.py:123: The name tf.get_variable is deprecated. Please use tf.compat.v1.get_variable instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/distributions.py:326: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/distributions.py:327: The name tf.log is deprecated. Please use tf.math.log instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/a2c/a2c.py:158: The name tf.summary.scalar is deprecated. Please use tf.compat.v1.summary.scalar instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/tf_util.py:449: The name tf.get_collection is deprecated. Please use tf.compat.v1.get_collection instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/common/tf_util.py:449: The name tf.GraphKeys is deprecated. Please use tf.compat.v1.GraphKeys instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/tensorflow_core/python/ops/clip_ops.py:301: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Use tf.where in 2.0, which has the same broadcast rule as np.where\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/a2c/a2c.py:182: The name tf.train.RMSPropOptimizer is deprecated. Please use tf.compat.v1.train.RMSPropOptimizer instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/tensorflow_core/python/training/rmsprop.py:119: calling Ones.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Call initializer instance with the dtype argument instead of passing it to the constructor\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/a2c/a2c.py:192: The name tf.global_variables_initializer is deprecated. Please use tf.compat.v1.global_variables_initializer instead.\n",
"\n",
"WARNING:tensorflow:From /Users/atambu310/anaconda3/lib/python3.7/site-packages/stable_baselines/a2c/a2c.py:194: The name tf.summary.merge_all is deprecated. Please use tf.compat.v1.summary.merge_all instead.\n",
"\n",
"9989 timesteps\n",
"Best mean reward: -inf - Last mean reward per episode: -58.99 - Last mean moves per episode: 19.24\n",
"Saving new best model\n",
"19988 timesteps\n",
"Best mean reward: -58.99 - Last mean reward per episode: -56.79 - Last mean moves per episode: 19.14\n",
"Saving new best model\n",
"29999 timesteps\n",
"Best mean reward: -56.79 - Last mean reward per episode: -54.75 - Last mean moves per episode: 18.89\n",
"Saving new best model\n",
"39992 timesteps\n",
"Best mean reward: -54.75 - Last mean reward per episode: -52.27 - Last mean moves per episode: 18.52\n",
"Saving new best model\n",
"49998 timesteps\n",
"Best mean reward: -52.27 - Last mean reward per episode: -49.64 - Last mean moves per episode: 18.19\n",
"Saving new best model\n",
"59996 timesteps\n",
"Best mean reward: -49.64 - Last mean reward per episode: -47.21 - Last mean moves per episode: 17.87\n",
"Saving new best model\n",
"69984 timesteps\n",
"Best mean reward: -47.21 - Last mean reward per episode: -45.26 - Last mean moves per episode: 17.60\n",
"Saving new best model\n",
"79998 timesteps\n",
"Best mean reward: -45.26 - Last mean reward per episode: -43.89 - Last mean moves per episode: 17.36\n",
"Saving new best model\n",
"89990 timesteps\n",
"Best mean reward: -43.89 - Last mean reward per episode: -42.83 - Last mean moves per episode: 17.20\n",
"Saving new best model\n",
"99992 timesteps\n",
"Best mean reward: -42.83 - Last mean reward per episode: -41.87 - Last mean moves per episode: 17.01\n",
"Saving new best model\n",
"109998 timesteps\n",
"Best mean reward: -41.87 - Last mean reward per episode: -41.10 - Last mean moves per episode: 16.86\n",
"Saving new best model\n",
"119985 timesteps\n",
"Best mean reward: -41.10 - Last mean reward per episode: -40.31 - Last mean moves per episode: 16.70\n",
"Saving new best model\n",
"129994 timesteps\n",
"Best mean reward: -40.31 - Last mean reward per episode: -39.70 - Last mean moves per episode: 16.57\n",
"Saving new best model\n",
"139990 timesteps\n",
"Best mean reward: -39.70 - Last mean reward per episode: -39.06 - Last mean moves per episode: 16.44\n",
"Saving new best model\n",
"149998 timesteps\n",
"Best mean reward: -39.06 - Last mean reward per episode: -38.37 - Last mean moves per episode: 16.31\n",
"Saving new best model\n",
"159993 timesteps\n",
"Best mean reward: -38.37 - Last mean reward per episode: -37.59 - Last mean moves per episode: 16.17\n",
"Saving new best model\n",
"170000 timesteps\n",
"Best mean reward: -37.59 - Last mean reward per episode: -35.67 - Last mean moves per episode: 15.86\n",
"Saving new best model\n",
"179999 timesteps\n",
"Best mean reward: -35.67 - Last mean reward per episode: -34.03 - Last mean moves per episode: 15.54\n",
"Saving new best model\n",
"189987 timesteps\n",
"Best mean reward: -34.03 - Last mean reward per episode: -32.54 - Last mean moves per episode: 15.25\n",
"Saving new best model\n",
"199988 timesteps\n",
"Best mean reward: -32.54 - Last mean reward per episode: -31.71 - Last mean moves per episode: 15.04\n",
"Saving new best model\n",
"209997 timesteps\n",
"Best mean reward: -31.71 - Last mean reward per episode: -30.81 - Last mean moves per episode: 14.83\n",
"Saving new best model\n",
"219998 timesteps\n",
"Best mean reward: -30.81 - Last mean reward per episode: -30.28 - Last mean moves per episode: 14.69\n",
"Saving new best model\n",
"229995 timesteps\n",
"Best mean reward: -30.28 - Last mean reward per episode: -29.47 - Last mean moves per episode: 14.52\n",
"Saving new best model\n",
"239992 timesteps\n",
"Best mean reward: -29.47 - Last mean reward per episode: -28.47 - Last mean moves per episode: 14.31\n",
"Saving new best model\n",
"249991 timesteps\n",
"Best mean reward: -28.47 - Last mean reward per episode: -27.78 - Last mean moves per episode: 14.15\n",
"Saving new best model\n",
"259995 timesteps\n",
"Best mean reward: -27.78 - Last mean reward per episode: -26.98 - Last mean moves per episode: 13.96\n",
"Saving new best model\n",
"269994 timesteps\n",
"Best mean reward: -26.98 - Last mean reward per episode: -26.03 - Last mean moves per episode: 13.77\n",
"Saving new best model\n",
"280000 timesteps\n",
"Best mean reward: -26.03 - Last mean reward per episode: -24.98 - Last mean moves per episode: 13.57\n",
"Saving new best model\n",
"289989 timesteps\n",
"Best mean reward: -24.98 - Last mean reward per episode: -24.37 - Last mean moves per episode: 13.41\n",
"Saving new best model\n",
"299991 timesteps\n",
"Best mean reward: -24.37 - Last mean reward per episode: -23.52 - Last mean moves per episode: 13.25\n",
"Saving new best model\n",
"309991 timesteps\n",
"Best mean reward: -23.52 - Last mean reward per episode: -22.70 - Last mean moves per episode: 13.11\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"319999 timesteps\n",
"Best mean reward: -22.70 - Last mean reward per episode: -21.51 - Last mean moves per episode: 12.94\n",
"Saving new best model\n",
"330000 timesteps\n",
"Best mean reward: -21.51 - Last mean reward per episode: -20.37 - Last mean moves per episode: 12.75\n",
"Saving new best model\n",
"340000 timesteps\n",
"Best mean reward: -20.37 - Last mean reward per episode: -19.19 - Last mean moves per episode: 12.57\n",
"Saving new best model\n",
"349989 timesteps\n",
"Best mean reward: -19.19 - Last mean reward per episode: -18.13 - Last mean moves per episode: 12.39\n",
"Saving new best model\n",
"359988 timesteps\n",
"Best mean reward: -18.13 - Last mean reward per episode: -17.23 - Last mean moves per episode: 12.25\n",
"Saving new best model\n",
"369989 timesteps\n",
"Best mean reward: -17.23 - Last mean reward per episode: -16.05 - Last mean moves per episode: 12.11\n",
"Saving new best model\n",
"379995 timesteps\n",
"Best mean reward: -16.05 - Last mean reward per episode: -14.75 - Last mean moves per episode: 11.96\n",
"Saving new best model\n",
"389993 timesteps\n",
"Best mean reward: -14.75 - Last mean reward per episode: -13.17 - Last mean moves per episode: 11.78\n",
"Saving new best model\n",
"399993 timesteps\n",
"Best mean reward: -13.17 - Last mean reward per episode: -11.83 - Last mean moves per episode: 11.62\n",
"Saving new best model\n",
"409990 timesteps\n",
"Best mean reward: -11.83 - Last mean reward per episode: -9.49 - Last mean moves per episode: 11.35\n",
"Saving new best model\n",
"419998 timesteps\n",
"Best mean reward: -9.49 - Last mean reward per episode: -7.48 - Last mean moves per episode: 11.08\n",
"Saving new best model\n",
"429988 timesteps\n",
"Best mean reward: -7.48 - Last mean reward per episode: -5.50 - Last mean moves per episode: 10.85\n",
"Saving new best model\n",
"440000 timesteps\n",
"Best mean reward: -5.50 - Last mean reward per episode: -3.12 - Last mean moves per episode: 10.54\n",
"Saving new best model\n",
"449992 timesteps\n",
"Best mean reward: -3.12 - Last mean reward per episode: -0.53 - Last mean moves per episode: 10.21\n",
"Saving new best model\n",
"459997 timesteps\n",
"Best mean reward: -0.53 - Last mean reward per episode: 2.45 - Last mean moves per episode: 9.84\n",
"Saving new best model\n",
"469998 timesteps\n",
"Best mean reward: 2.45 - Last mean reward per episode: 5.42 - Last mean moves per episode: 9.46\n",
"Saving new best model\n",
"479996 timesteps\n",
"Best mean reward: 5.42 - Last mean reward per episode: 7.92 - Last mean moves per episode: 9.12\n",
"Saving new best model\n",
"489995 timesteps\n",
"Best mean reward: 7.92 - Last mean reward per episode: 10.11 - Last mean moves per episode: 8.84\n",
"Saving new best model\n",
"499997 timesteps\n",
"Best mean reward: 10.11 - Last mean reward per episode: 12.24 - Last mean moves per episode: 8.58\n",
"Saving new best model\n",
"509998 timesteps\n",
"Best mean reward: 12.24 - Last mean reward per episode: 14.10 - Last mean moves per episode: 8.35\n",
"Saving new best model\n",
"519997 timesteps\n",
"Best mean reward: 14.10 - Last mean reward per episode: 15.50 - Last mean moves per episode: 8.18\n",
"Saving new best model\n",
"529999 timesteps\n",
"Best mean reward: 15.50 - Last mean reward per episode: 16.69 - Last mean moves per episode: 8.05\n",
"Saving new best model\n",
"540000 timesteps\n",
"Best mean reward: 16.69 - Last mean reward per episode: 17.56 - Last mean moves per episode: 7.97\n",
"Saving new best model\n",
"549995 timesteps\n",
"Best mean reward: 17.56 - Last mean reward per episode: 18.30 - Last mean moves per episode: 7.89\n",
"Saving new best model\n",
"559994 timesteps\n",
"Best mean reward: 18.30 - Last mean reward per episode: 18.84 - Last mean moves per episode: 7.85\n",
"Saving new best model\n",
"569992 timesteps\n",
"Best mean reward: 18.84 - Last mean reward per episode: 19.57 - Last mean moves per episode: 7.77\n",
"Saving new best model\n",
"579991 timesteps\n",
"Best mean reward: 19.57 - Last mean reward per episode: 20.18 - Last mean moves per episode: 7.72\n",
"Saving new best model\n",
"589999 timesteps\n",
"Best mean reward: 20.18 - Last mean reward per episode: 20.43 - Last mean moves per episode: 7.70\n",
"Saving new best model\n",
"599999 timesteps\n",
"Best mean reward: 20.43 - Last mean reward per episode: 20.88 - Last mean moves per episode: 7.65\n",
"Saving new best model\n",
"609995 timesteps\n",
"Best mean reward: 20.88 - Last mean reward per episode: 21.29 - Last mean moves per episode: 7.60\n",
"Saving new best model\n",
"619988 timesteps\n",
"Best mean reward: 21.29 - Last mean reward per episode: 21.57 - Last mean moves per episode: 7.55\n",
"Saving new best model\n",
"630000 timesteps\n",
"Best mean reward: 21.57 - Last mean reward per episode: 21.90 - Last mean moves per episode: 7.52\n",
"Saving new best model\n",
"639997 timesteps\n",
"Best mean reward: 21.90 - Last mean reward per episode: 22.09 - Last mean moves per episode: 7.50\n",
"Saving new best model\n",
"649994 timesteps\n",
"Best mean reward: 22.09 - Last mean reward per episode: 22.17 - Last mean moves per episode: 7.49\n",
"Saving new best model\n",
"659998 timesteps\n",
"Best mean reward: 22.17 - Last mean reward per episode: 22.32 - Last mean moves per episode: 7.46\n",
"Saving new best model\n",
"669992 timesteps\n",
"Best mean reward: 22.32 - Last mean reward per episode: 22.50 - Last mean moves per episode: 7.44\n",
"Saving new best model\n",
"679996 timesteps\n",
"Best mean reward: 22.50 - Last mean reward per episode: 22.72 - Last mean moves per episode: 7.41\n",
"Saving new best model\n",
"689992 timesteps\n",
"Best mean reward: 22.72 - Last mean reward per episode: 22.79 - Last mean moves per episode: 7.40\n",
"Saving new best model\n",
"699999 timesteps\n",
"Best mean reward: 22.79 - Last mean reward per episode: 22.87 - Last mean moves per episode: 7.40\n",
"Saving new best model\n",
"709998 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 22.77 - Last mean moves per episode: 7.40\n",
"719985 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 22.65 - Last mean moves per episode: 7.40\n",
"729999 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 22.57 - Last mean moves per episode: 7.42\n",
"739994 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 22.58 - Last mean moves per episode: 7.42\n",
"749997 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 22.50 - Last mean moves per episode: 7.42\n",
"760000 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 22.52 - Last mean moves per episode: 7.43\n",
"770000 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 22.73 - Last mean moves per episode: 7.42\n",
"779981 timesteps\n",
"Best mean reward: 22.87 - Last mean reward per episode: 23.10 - Last mean moves per episode: 7.38\n",
"Saving new best model\n",
"789997 timesteps\n",
"Best mean reward: 23.10 - Last mean reward per episode: 23.41 - Last mean moves per episode: 7.35\n",
"Saving new best model\n",
"799997 timesteps\n",
"Best mean reward: 23.41 - Last mean reward per episode: 23.98 - Last mean moves per episode: 7.30\n",
"Saving new best model\n",
"809994 timesteps\n",
"Best mean reward: 23.98 - Last mean reward per episode: 24.33 - Last mean moves per episode: 7.27\n",
"Saving new best model\n",
"819992 timesteps\n",
"Best mean reward: 24.33 - Last mean reward per episode: 24.73 - Last mean moves per episode: 7.24\n",
"Saving new best model\n",
"829997 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.89 - Last mean moves per episode: 7.22\n",
"Saving new best model\n",
"839995 timesteps\n",
"Best mean reward: 24.89 - Last mean reward per episode: 25.00 - Last mean moves per episode: 7.21\n",
"Saving new best model\n",
"849998 timesteps\n",
"Best mean reward: 25.00 - Last mean reward per episode: 25.07 - Last mean moves per episode: 7.20\n",
"Saving new best model\n",
"859982 timesteps\n",
"Best mean reward: 25.07 - Last mean reward per episode: 25.06 - Last mean moves per episode: 7.21\n",
"869996 timesteps\n",
"Best mean reward: 25.07 - Last mean reward per episode: 25.05 - Last mean moves per episode: 7.19\n",
"879999 timesteps\n",
"Best mean reward: 25.07 - Last mean reward per episode: 25.05 - Last mean moves per episode: 7.19\n",
"889997 timesteps\n",
"Best mean reward: 25.07 - Last mean reward per episode: 25.06 - Last mean moves per episode: 7.17\n",
"899993 timesteps\n",
"Best mean reward: 25.07 - Last mean reward per episode: 25.23 - Last mean moves per episode: 7.16\n",
"Saving new best model\n",
"909998 timesteps\n",
"Best mean reward: 25.23 - Last mean reward per episode: 25.23 - Last mean moves per episode: 7.16\n",
"Saving new best model\n",
"920000 timesteps\n",
"Best mean reward: 25.23 - Last mean reward per episode: 25.28 - Last mean moves per episode: 7.16\n",
"Saving new best model\n",
"930000 timesteps\n",
"Best mean reward: 25.28 - Last mean reward per episode: 25.41 - Last mean moves per episode: 7.14\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"940000 timesteps\n",
"Best mean reward: 25.41 - Last mean reward per episode: 25.49 - Last mean moves per episode: 7.14\n",
"Saving new best model\n",
"949998 timesteps\n",
"Best mean reward: 25.49 - Last mean reward per episode: 25.56 - Last mean moves per episode: 7.14\n",
"Saving new best model\n",
"960000 timesteps\n",
"Best mean reward: 25.56 - Last mean reward per episode: 25.61 - Last mean moves per episode: 7.13\n",
"Saving new best model\n",
"969996 timesteps\n",
"Best mean reward: 25.61 - Last mean reward per episode: 25.75 - Last mean moves per episode: 7.13\n",
"Saving new best model\n",
"979997 timesteps\n",
"Best mean reward: 25.75 - Last mean reward per episode: 25.89 - Last mean moves per episode: 7.11\n",
"Saving new best model\n",
"989998 timesteps\n",
"Best mean reward: 25.89 - Last mean reward per episode: 25.85 - Last mean moves per episode: 7.14\n",
"1000000 timesteps\n",
"Best mean reward: 25.89 - Last mean reward per episode: 25.97 - Last mean moves per episode: 7.12\n",
"Saving new best model\n"
]
}
],
"source": [
"clear_session()\n",
"\n",
"# ships -- keep only one kind for 5x5 grid\n",
"ships = {}\n",
"ships['cruiser'] = 3\n",
"\n",
"grid_size = 5\n",
"num_timesteps = 1000000 # this is number of moves and not number of episodes\n",
"\n",
"best_mean_reward, n_steps, step_interval, episode_interval = -np.inf, 0, 10000, 10000\n",
"\n",
"# Instantiate the env\n",
"env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size, ships=ships)\n",
"\n",
"# wrap it\n",
"log_dir = \"./gym/\"\n",
"os.makedirs(log_dir, exist_ok=True)\n",
"env = Monitor(env, filename=log_dir, allow_early_resets=True)\n",
"env = DummyVecEnv([lambda: env])\n",
"\n",
"# Train the agent - Note: best model is not save in Callback function for PPO2; save manually\n",
"model = A2C('MlpPolicy', env, verbose=0).learn(total_timesteps=num_timesteps, callback=callback)"
]
},
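{
"cell_type": "markdown",
"metadata": {},
"source": [
"Before saving, a quick sanity check of the trained agent. The cell below is a minimal rollout sketch (not part of the original run): it assumes `model` and the `DummyVecEnv`-wrapped `env` from the training cell above are still in scope, and `n_eval_episodes` is an arbitrary choice."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Minimal rollout sketch: play a few episodes greedily with the trained agent\n",
"# and report the number of moves each episode took.\n",
"n_eval_episodes = 10  # arbitrary\n",
"for ep in range(n_eval_episodes):\n",
"    obs = env.reset()\n",
"    done, moves = False, 0\n",
"    while not done:\n",
"        action, _states = model.predict(obs)\n",
"        obs, rewards, dones, infos = env.step(action)\n",
"        done = dones[0]  # DummyVecEnv returns length-1 arrays\n",
"        moves += 1\n",
"    print('episode %d finished in %d moves' % (ep, moves))"
]
},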
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#model.save(log_dir + 'best_model_cruiser_5x5.pkl')"
]
},
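{
"cell_type": "markdown",
"metadata": {},
"source": [
"To reuse the agent later without retraining, the saved file can be loaded back. A minimal sketch, assuming the manual save above was executed with that exact path:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: reload a previously saved agent from disk.\n",
"# Assumes the commented-out save above was run with this path.\n",
"loaded_model = A2C.load(log_dir + 'best_model_cruiser_5x5.pkl')\n",
"action, _states = loaded_model.predict(env.reset())"
]
},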
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY0AAAEWCAYAAACaBstRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nO3dd5wU9fnA8c/D0TsIKE2OXgRBRCygonRRURONxp/dGHvUWLCjWFBjYhKNJXZjj8aGWFBUbAgoTQEFAekgSBc47p7fH9/v7s3u7e7tHdzN7t3zfr3udTPf+c7MMzu780z9jqgqxhhjTDqqhB2AMcaY7GFJwxhjTNosaRhjjEmbJQ1jjDFps6RhjDEmbZY0jDHGpM2Sxm4kImeKyKe7aVq5IqIiUnV3TK+siMh4ETkj7DgyiYjUEpE3RWSDiLwcdjylJSIDRGRpKccdIiKv7e6YTNkQkb+KyPnp1M3KpCEi/UXkc/+jXCcin4nIAeUcQ6gbdRFZJCKDwph3kKoOV9WnymLaIlJfRO4TkZ9EZLOIzPf9TcpifrvRb4E9gT1U9cT4gSIyWkTy/DKt99/lg8s/zDJ1BzA27CCKIyKPiMg8ESkQkTMTDL9cRFb6bc3jIlIjMCxXRCaKyFYRmRv/e0w1bgni+0hEzi3VwpXMPcD1IlK9uIpZlzREpD7wFvBPoDHQErgF2B5mXBVRmEc5/sv7AbAPMAyoDxwCrAX6lmJ65bksbYDvVXVnijovqmpdoAkwEQj1iGR3fj5+B66Bqn65u6aZ5nxLswwzgAuBrxNMbygwChgI5ALtcNuaiOeBb4A9gOuB/4pI0zTHzSiqugKYCxybTuWs+gP6AOtTDD8T+Az4G7Ae+BG3sTkTWAKsBs4I1G8APA2sARYDNwBV/LAqvn+xH+9p3I8B4CdAgc3+72A/j0+BvwC/AAuB4XHzegxYASwDbgNy/LAcP97PPuaL/PSrJlnORcCgJMOOBqb75f8c2DcwbBSwANgEfAccn+SzW+fjK26ZPgLODYyfqm5b4BM/7wnAA8B/kizDucAqoG6Kda1Ah0D/k8BtvnsAsBS4BlgJPAPMAY4O1K/qP+/evv8g/3mtx21MBqSYd1e/7OuBb4FjffktwA4gz38vzkkw7ujgcgPd/LI0LW4dAmcBbwbqzQdeCvQvAXr57r/7/o3ANODQuBj+C/zHDz8XqOU/w1/8d+MqYGlgnGtw39tNwDxgYJLP5ibg0biyLsD7/ns1Dzgp8JmvxP8OfNnxwMzAbzDynV0LvAQ09sNy/ed2Du73+AkwDrgkbt4zgeOK2a58CpwZV/YccEegfyCw0nd3wu2o1gsMnwScX9y4CeZd06+HtX59T8Edqd4O5APb/Hfp/lSfZeA38JAfvgn4GGjjhwnut70a2OA/l+6Bca8Hnkj1OalqViaN+v7DfQoYDjSKG34msBP348rBbfh+wm2gagBD/IdZ19d/GngdqOe/hN/jf+jA2bgfZTugLvAq8EzcF7Zq3LzzgD/4eV8ALAfED38NeBioAzQDvgL+6Iedj8v0rXFHUBPjpx+3nItIkDSA3v5LcaCP4Qxft4YffiLQAvdj/B2wBWge99ldgtug1kpjmT4iNmmkqvsFLqFUB/rjNlbJksYLwFPFfBeKSxo7gbv8eq+F25g9G6g/Apjru1vivldH+c9msO9vmmC+1fz34jq/LEfivlOd/fDRyZYrfrgffywueVUtbh3ivovrfYzNcTs0y/x47XAb/MhOz//h9oKrAn/GbZxrBmLIA47z06rl45iE+/61BmbjkwbQGZeAWgS+/+2TLN/LwFWB/jp+3LN8LL398u7jhy8ABseNP8p3XwZ8CbTyy/8w8Hzcb/BpP49awEnA5MC0evr1WL2Y71KipDED+F2gv4mf3x64xDYnrv79wD+LGzfBvP8IvAnU9ut7f6B+/O8rzc/ySdx38TD/ef0d+NQPG4rbeWiISyBd8b99P/wE4Otit8G7Y0Ne3n9+YZ/E7UnuBN4A9vTDzgR+CNTt4VfWnoGytUAvv4K2A93iVuBHvvsD4MLAsM64H1pVkieN+YH+2r7OXrg9h+1ArcDwU4CJvvtD/F6K7x8SP/24z2ARiZPGg8CYuLJ5wOFJpjMdGBmI/6e44UmXKf5LXczy7+3XVe3A8P+QPGm8D4wt5ntQXNLYgd9I+rIOuB9Ubd//LHCT774Gv0MQqP8ugaPSQPmhuA1wlUDZ88Bo3z062XIFhu/Abfzz/fdxQLrrELfR6A2cDDyC2/nogtuQvJFivr8APQMxfBI3/EdgWKD/PAqTRgdcIhsEVCtmvbxP7Hf5d8CkuDoPAzf77tuAx313PdyOTBvfP4fAEQ0uUcb/BtsFhtfA7YF39P1/Af6VKl5fL1HSWBD3eVTz88sFTgO+jKt/O/BkceMmmPfZxJ0RCAz7iNikUdxn+STwQmBYXf8da43bufked3RXJcG8BgM/FvdZZd01DQBVnaOqZ6pqK6A7bs/5vkCVVYHuX/048WWR88nVcXtrEYtxe5346cYPq4pLAMmsDMS51XfWxZ3nrgas8Bc/1+NWdrPAvJbEzas02gB/jszDz6e1nz4icrqITA8M6477HCKWFJ1k0mVKJFndFsC6QFmyeUWsxW0gdsUaVd0WiGc+biN0jIjUxp2/fc4PbgOcGPe59U8SQwtgiaoWBMqC35t0vKSqDXHfpdm4vcuIlOsQd8phAG5v8mPchuVw//dxZCIi8mcRmeMvxK7HnR5Nta6Tfgf9Z3cZLtmsFpEXRKQFif2C2/gHl+fAuOU5FbczAW4dnOAvFEf2dhcHxv1fYLw5uI1g8DcYjVlVt+NOYf2fiFTB7Zg9kyTO4mzGndmIiHRvSjAsMnxTGuPGewa3g/KCiCwXkbtFpFqSmIr7LCH289iMS6ItVPVD3NHQA8AqfxNAMMZ6uB2ZlLIyaQSp6lxcdu1eitF/xu21tAmU7Y07bwvu1Er8sJ24pKQlnNcS3JFGE1Vt6P/qq+o+fvgK3IYhOK/SWALcHphHQ1WtrarPi0gb4N/AxbjD5Ia4DZYExi/pcqVrBdDYb6wjWierjLvmMVRE6qSosxV3NBOxV9zwRMvyPG5DMhL4zm8MwX1uz8R9bnVUNdEdQMuB1n6jFBH83qRNVX/GHd2OFpFIgkq6Dv3wSNI41Hd/TFzSEJFDcUdPJ+FO4TbEncdOta5TfgdV9TlV7Y/7TSju1F8iM3Hn/COWAB/HLU9dVb3AT/c7XIIaDvyewkQeGXd43Lg1VTX4Wccvx1O4DelAYKuqfpEkzuJ8izu9FdETWKWqa/2wdiJSL274t2mMG0NV81T1FlXthrv+ejRwemRwXPWUn6UXXYciUhd3unG5n9c/VHV/3A0mnXDXrSK64k6rpZR1SUNEuvg9qFa+vzVuI1DiOzVUNR+3V3K7iNTzG9UrcKdNwG1gLheRtv7DvwN318tO3IXzAtx55HTmtQJ4D7jX30paRUTai8jhvspLwKUi0kpEGuEu/hWnmojUDPxVx
SWF80XkQHHqiMgI/+Wug/sSrgEQkbMoXbItMb/nOBW3cazubzE9JsUoz+B+IK/4dV5FRPYQketE5ChfZzrwexHJEZFhuI1mcV7Anfq7gNiN039wRyBD/fRqintOoVWCaUzGnUK5WkSqicgAvywvpDH/IvyOz7vA1b4o1ToElxiOwJ3qXIq7DjEMd679G1+nHm4HZw1QVURuouiecbyXgGtFpJFf7ksiA0Sks4gc6Y8GtuGO1vOTTOdtYtfFW0AnETnNf17VROQAEekaqPMccCnu6Cl4J9lDuN9nGx9HUxEZmWohfJIoAO6lmKMM/12siUumkd9TZLv4NHCOiHTzv8kbcDuoqOr3uO/fzX6c44F9gVeKGzdBDEeISA8RycFd58uj8LNdRew2Jp3P8ihxjyVUB8bgrvEs8fUO9EcxW3DrMbgODwfGp/q88AufVX+4UwAv4fbqtvj/D1N44ehM/IUfLTwXq3HTWAr0992NcBuMNbiN1E3E3j11ky9f4+s1CkznVl++HneeMGbevk70vDvu9MCDfv4bcD/wk/2wqrg7G9bi7jpK5+4pjfuLnM8fhrsDYz1u7/Fl/F0euPOu63BHWX/FbYDOTfTZpSgLLtNHxYwfrNset4HbhLte9AjwWIp13QB32nEJ7nB/gY95Dz+8D26PbhNu4/A8cXdPJZnuB7gN6l5x5Qf6z2OdX6/jgL2TTGMfX3cDRe9CG02aF8Lj5r0FaFbcOvTDVxC40wWXkMcH+nNwd+pt9HWvJnAdLEkMtXEbu/XE3T2F2yB+5T/rdbiNV4sUyzgFODDQ39l/nmtw3/EP8Xd5+eF74zb04+KmUwW3IzfPz3sB/q4kElxXDIx3A3HXO5LE+RFFf0cDAsOvwG24NwJP4G8oCcz/I1wCnUfcNcZU48bVO8WPv8XX/weFN0UcjLsO8Qvwj+I+S2LvntqMu6OsrR82EHcUuBn3+3+WwhuCmuO2SylvGFDV6F0txpQ7EXkRd/fSzWHHYnYvERmCu4nkuJDmfzpwnrrTaZWGiDyJS/Q3lHC8e4EFqvqv4upmdBMVpmIR99DXOtyR1BDcdYWMf2rYlJyqvoc7HVvu/HWzC4FiN4DGUdU/p1s3665pmKy2F+5wfjPuEPwCVf0m5RjGlIC4J7HX4E7zPFdMdVMKdnrKGGNM2uxIwxhjTNoqxDWNJk2aaG5ubthhGGNMVpk2bdrPqtq0JONUiKSRm5vL1KlTww7DGGOyioiUuOUJOz1ljDEmbZY0jDHGpM2ShjHGmLRZ0jDGGJM2SxrGGGPSZknDGGNM2ixpGGOMSZslDWOMySB3vTOX3FHj+GJBkfc1RakqG37NI4xmoCxpGGNMOcgvKNzA544aR+6ocdz9zlwKCpS5KzcCcPPrs3nwowUAnPLvL/n4+zXRutMWr4uOP/KBz+h5y3u0vfZtVm3cRnmqEA0W9unTR+2JcGNMQYHS7rq3ARh9TDfO7Nd2l6f59U+/cMK/PufrGwfTuE51AF6fvozFa7dyZJdm7NOiPht/3cmCnzfTe+9G0fFWbdzG1f+dycffr4mZ3kt/PJiTHi58A227JnX48ectPHZGH855KvV27JieLfjHyb1oe+3bMeWLxo4o1bKJyDRV7VOicSxpGGMymary9BeLGd59L5rVrxktP+mhL/hq0ToePLU3w7rvhYiw/5j3WbtlR7TO56OOZObSDbRsWIserRpEyxev3cKURb9w5cuxr8T+6rqBnP74V6zZtJ1pNw4md9S46LC+uY258zc9GHjvx0ljnXHzEOrXrFpko15Sc24dRteb3kmr7oybhtCgdrVSzceShjGmwvlu+UaO+sekmLLZtwyl+83vlmg6X1x7JM0b1IpJBPFqVK3C9p0FpYqzpKbfNJhet76fcFjkyGFnfgEiQl5+AV1ujE0iH/z5cNo3rbtLMZQmadg1DWNMRotPGECxCeP4/VoWKTv4zg9jriskUpKEcfS+zYut88Dve3NYp6a8csHBfHPj4Gj5rNFDaFi7OgvvPIpJVx/BKxccEh322kX9ot1Vc6qQU0WoWS2HH+84Kmbau5owSsuONIwxJVJQoHS58R1O7tuaW0d2323TjRwBzB0zjGmLf+HURyeXeBqHdWrK02f3BeCJzxZyy5vfxQzvvGc95q3aFFM26eoj2JaXz+C/fZJwmovGjqCgQPlswc+c9thXHNOzBf88ZT8A8vIL+DUvn/o1qxU5gkl0nWFbXj41qlZBREq8bOAupi9f/yt7NahJtZxd3+e301PGmCIm/bCG0x77itcu6kev1g1LNY3VG7fx7YqNHNG5Ga9+vZQrXnLXAuaOGUbNajkJx9mWl8+cFRvZL3BxeMxb3/HYpwupVS2Hf5/eh/4dm0SHpTptBG4jrKox1wtm3DyE7TvzeXTSQk4/uA2tGtWOGef16csY0LkZh98zkfVb86Lldxzfg98fuHdM3bkrN3LVyzMZ2LUZfxrYMeGGfcfOAqrlSNKNfmQZSnthurxZ0jDGFBG5YAyxG7OJc1fTr0MTqletQkGB8t53q+jXYQ96jH4PgLcvPZRuLepz65vf8fhnCwGYOXoI+/rhEQvvPIq3Zq6ga/N6dGhWL1oe2YB++OfDaedPpcQnhklXH0HrxrXZvH1nylNOu7oRjk82n406kpYNa+3SNCuC0iSNCvESJmMqi215+SxZt5UOzeoWe4rjtMcms3jtVn5atzVa9uHcVRzZZU8G3vsRC9ZsARInAnDXEg7v1DTmltFE9eLvFJp9y1B+XLM52j/qlVm8dP7BCWM89O6JLBo7gucmJ34X0P8uPIR9W5Xu6ChIRHju3AP5/aOTuWFEV0sYu8CShjFZ4pkvFnHj699G++fdNowaVROfGgKY9MPPRcrOfnIqj53RJ5owAI574LOk04h/xiAdB94+gS078qP9dWq4GFOd1bjj7bkA5O5Rm0VrC5Nc8NTWrjqkQ5OsOW2Uyez0lDFZINn5/h9uHw64c+37+NM7FwxoH32qeHfp06YRUxf/ElM2+bqBHHjHB2mNv2jsiOgy7FGnOq9f3I/+d00sUm/CFYfRoVk9xs1cwaBuzVImRbPr7PSUMRVQqh27jtePB6DLXoXXEhIljAV3HMUJD37OjCXrk07r/csP491vVzJi3xas27Kd3zxY+NTyf/0toZEN//SbBtOwdnUWjR3BsvW/UrtaDoP/9gk/b95eZLp1a8RuZvp1aFLkgnVE5JrIiDRuZzXhsOc0jAnZT2u3csK/PuOEf33GjgTPCQSvGSwaO4Lvbh1apM7clZuKlAH89aSeLBo7gpwqwpmHtImW92rdkLljhkX7TzuoDR33rMfFR3akbZM60eYw7j2xJwvvLHw+YNHYESwaO4KGtatHy1o2rEWjOtWZesMg/n5yr2j5mxf3B2Dz9p1s31l4umrMce423eB0Af59eol2eE1IQjvSEJHWwNPAXkAB8Iiq/l1EGgMvArnAIuAkVf0l2XSMyXanPe4uWAN0umE89WpUZdYtLjEEjzKO6NwUgNrVq7Lf3g355qeiRw29927I17581ugh1KtZ
2LzEwe0Kb2+NPECW7By/iJTq/P/IXi15dNJCZi3bENNsR+cbCp9mblCrWnQeQYO77Vni+ZnyF+aRxk7gz6raFTgIuEhEugGjgA9UtSPwge83pkK65PlvogkjYtP2nUDR20QfP/OAaPf/LuxH2yZ1YsabesMgHji1d7Q/mDAA9mpQkxk3DYleBykrb17SP5pwIg/aRfzx8HYx/XZhOvuEdqShqiuAFb57k4jMAVoCI4EBvtpTwEfANSGEaEyZUlXenLE86bBgwmjftE6RPfOJVw5w05i5goFdmlGnRlW25eXHTypGaRu2K634xHbN0C5F6ljiyC4ZcU1DRHKB/YDJwJ4+oUQSS7Mk45wnIlNFZOqaNSW/LdCYsnbVyzPIHTWOgkB7Rw99vIBvl28A4LxnpsXUn3DFYfTrsAdQ9NmHty45NOE8RIRje7agjr/YXLNaDk3r1eCu3/TYbcuxK5rUrRHTX6VK6ZrPMJkj9LunRKQu8ApwmapuTLdNFlV9BHgE3C23ZRehMSVXUKC8PG0pALe8+S23jOzOzvwCxo6fy9jxRevPvmUodWtU5bP5Rd/WNuOmIdSqnv6tp1OuH1TquHe3ksRtskOoRxoiUg2XMJ5V1Vd98SoRae6HNwdWhxWfMaUVeREQwFNfLOaKF6fT4foE2QL3kF7kttTgHU0Ad/2mR7mfUtrdujWvH3YIZjcK8+4pAR4D5qjqXwOD3gDOAMb6/6+HEJ4xpZLsIbxXv1mWdJzgA2zBxv8iz0Jku9cv7kfH68fz/B8OCjsUsxuEeXqqH3AaMEtEpvuy63DJ4iUROQf4CTgxpPiMKZH7P/yhxOMkupOpol0YrpZTpcItU2UW5t1TnwLJLmAMLM9YjNlVqspf3vu+SHmw+Yxg2TNfLOL3B7Yhxy4MmywT+oVwY7Ldjp0FnPTwF0XKE+1df3+bO7I47eDcsg7LmDKREbfcGpNNrv/fLHJHjeMN/4xFpxvGMz3QptMdx/eISRgn+FePjhrehepV7Sdnspt9g40pgbz8Ap6d/BMAlz7/TZHhn1x1RJE3wl03oivdmtfnD4e2K1LfmGxjp6eMKYGOSW6bjWjduOjLfZrUrcHbf0r8cJ4x2caONIxJ0878oi3QnvxI7LWMdB9ONSZbWdIwJk2JHs778sd10W67rdRUBpY0jEnDZt/yLMBtx3Xnnt/uG2I0xoTHkoYxCXzz0y/kjhpH7qhxqCrd/atUAX53QGuO83dERcQ3/2FMRWUXwo1J4Ph/fR7tHv3Gt4Xl+7WkWo7b17psUEcGdG5Gz1YN7FqGqTQsaRhTjKe+WBztHhtocvyyQZ3CCMeYUNnpKWPi5Bckbmm/R8sGMY0LGlMZWdIwJs60xYlfSR95r7YxlZklDWPivDN7JQAHtWscU26NCxpjScOYIp7/yjUTckrfvblqaOeQozEms9iFcGPi/JqXD8CQbntRq3oO5/RvG/NyJGMqMzvSMCaJyPutLWEYU8iShjHGmLRZ0jAmIHK7bc9WDUKOxJjMZEnDmIAVG34FILdJnZAjMSYzWdIwJuCyF6YD8Pr05SFHYkxmsqRhTMBU/2Dfuf3bhhyJMZnJkoYxwPL1v5IXeMnSyX33TlHbmMrLntMwld5Tny/i5kBLtgAdmtUNKRpjMpsdaZhKbfWmbUUShjEmOTvSMJXWo5N+5LZxc4qUX3pkhxCiMSY72JGGqbQSJQyAywfbezKMScaShqm0DshtVKTsh9uH21v4jEnBTk+ZSmvKInd7bdsmdZh45YBwgzEmS9iRhqmUJs5dXdhtCcOYtFnSMJXOqo3bOOvJKWGHYUxWsqRhKp0D7/gg7BCMyVqWNEylNuX6QWGHYExWsaRhKpX7Jnwf7b7uqC40rVcjxGiMyT6WNEylsWTdVu6b8EO0/7zD2ocYjTHZyZKGqTQOvXti2CEYk/XsOQ1ToeUXKN+v2sSyX36NKf9s1JEhRWRMdrOkYSq0hz5ewD3vzospmztmGDWr5YQUkTHZzU5PmQotPmEAljCM2QUZmzREZJiIzBOR+SIyKux4TMXw9Y2Dww7BmKyWkUlDRHKAB4DhQDfgFBHpFm5UJtt9ee1AGtepHnYYxmS1TL2m0ReYr6o/AojIC8BI4LtQozJZ5ZctO6Ldi8aOCDESYyqOjDzSAFoCSwL9S31ZlIicJyJTRWTqmjVryjU4k/m25eWz35j3ww7DmAonU5NGohcaaEyP6iOq2kdV+zRt2rScwjLZ4tMffg47BGMqpExNGkuB1oH+VsDykGIxWWj6kvXR7r+e1DPESIypWDI1aUwBOopIWxGpDpwMvBFyTCaL3D9xPuDe931C71YhR2NMxZGRF8JVdaeIXAy8C+QAj6vqtyGHZbLQoG57hh2CMRVKRiYNAFV9G3g77DhM9tmWlx/t3rdVwxAjMabiydTTU8aUyuvTl9HlxnfCDsOYCsuShqkQBt77Ee/MXsGfXpgedijGVGiWNEzWW/jzFhas2cL5//k67FCMqfAsaZisty7w5HfQJ1cdUc6RGFPxZeyFcGPSFXyFa8Skq4+gdePaIURjTMVmRxom602Ke/r79wfubQnDmDJiRxqmwph9y1BemrKEs/u3DTsUYyosO9IwFUbdGlUtYRhTxixpmKw2I9DGlDGm7FnSMFntve9WAtBpz7ohR2JM5WBJw2QtVeWBiQsAuPmYfUKOxpjKIa2kISLtRaSG7x4gIpeKiDXqY0L1xYK10e6OdqRhTLlI90jjFSBfRDoAjwFtgefKLCpj0vD7RydHu5vVqxliJMZUHukmjQJV3QkcD9ynqpcDzcsuLGOMMZko3aSRJyKnAGcAb/myamUTkjEl88Ptw8MOwZhKI92kcRZwMHC7qi4UkbbAf8ouLGPSVy3H7ucwpryk9US4qn4HXBroXwiMLaugTOWVO2ocAN/eMpQ6NRJ/PT+at5rvVmwsz7CMMV7KpCEiswBNNlxV993tERkDHHb3RKbdOLhI+dyVGznziSnR/oPb7VGeYRlT6RV3pHG0/3+R//+M/38qsLVMIjIGWJukufNh902K6R9znD2fYUx5Spk0VHUxgIj0U9V+gUGjROQz4NayDM5ULtMW/1LicTo0q1cGkRhjkkm3lds6ItJfVT8FEJFDgDplF5apjJ76fFHC8vVbd9Dr1vfLNxhjTELpJo2zgSdEpAHuGscGX2ZMqagqba99G4BFY0cA8MaM5TF1hv99ErWr5yQ8ArlicCeO6mGPChlT3opNGiJSBeigqj1FpD4gqrqh7EMzFdnGbTuLrTMnyR1Spx3UhksHdtzdIRlj0lDsDe6qWgBc7Ls3WsIwu8NRf5+UdNg5xbwT40+DLGEYE5Z0n4p6X0SuFJHWItI48lemkZkKbdn6X2P68wsK7+y+amjnIvXb7FH4+tYmdWuUXWDGmJTSTRpn4267/QSY5v+mllVQpuL6efP26AN8ERu35dH+urej/TWr5RQZb/yfDi3z2IwxxUv3iXB7h6bZZZu25dHntglFyvcd/V6RsiM6N2XivDXR/trVq9KzdUOuGNypTGM0xqSW7t1TiEh
3oBsQbYNaVZ8ui6BMxXTlyzOKrTN3zDAAnjirb5Fhr1/Ur0iZMaZ8pfsSppuBf/q/I4C7gWPLMC5TAdWvGdsw8jXDuhSpk+jUlDEmc6R7TeO3wEBgpaqeBfQE7GqkKZFOe7qnt5848wAWjR3BBQPaxwy36xbGZL50k8av/tbbnf5ZjdVAu7ILy1QkqsrsZRuYvHAdALWqJz6a6Nq8fnmGZYwphXSTxlT/TvB/4+6c+hr4qsyiMhXKnePncvQ/P2XCnFUAtGtS2ALN2BN6APDpNUeEEpsxpmTSvXvqQt/5kIi8A9RX1ZllF5apSB755MeY/uB7Mk7uuze/2b+VvUjJmCyRVtIQkaeBScAkVZ1btiGZii7+5UqWMIzJHun+Wp8EmgP/FJEFIvKKiPyp7MIyFdXzfzgo7BCMMbsgraShqh8CtwM3Ao8CfYALyjAuk0UKCpTr/zeL1Zu2paw3a1R1abAAABbdSURBVPQQDm5vb9ozJpule3rqA9z7M77AnaY6QFVXl2VgJnu0802APDv5p2gz54nUi3tOwxiTfdI9PTUT2AF0B/YFuotIrTKLylQIBQVapJ0pY0x2S/f01OWqehhwPLAWeAJYX9qZisg9IjJXRGaKyP/87byRYdeKyHwRmSciQ0s7D1M2VJU7x89h+858ILZ1WoBtea58xpL10SMQY0zFkW4zIheLyIvAdOA44HFg+C7M932gu6ruC3wPXOvn0w04GdgHGAb8S0SsXYkM0vbat3n44x/pfMM7/LJlB4vXbokZ3uXGdwAY+cBnMeUTrji83GI0xpSddBssrAX8FZimqsW/cq0Yqhps1vRLXDMlACOBF1R1O7BQROYDfXHXUkzINm+PXfX7jUn83u6CAuW0g9rwzJeLo2UdmtUt09iMMeUj3dNT9wDVgNMARKSpiOyu5tLPBsb77pbAksCwpb6sCBE5T0SmisjUNWvWJKpidrPuN7+bdNjIXi2i3e2ue5uf1m0F4ITeLZl0tT3tbUxFUZJWbq/Bn0bCJZD/FDPOBBGZneBvZKDO9cBO4NlIUYJJaYIyVPURVe2jqn2aNm2azmKYNM1etoHlcW/WK07/Dk1i+j/+3iXyv57Ui9aNaycaxRiThdI9PXU8sB+uzSlUdbmI1Es1gqoOSjVcRM4AjgYGqmokMSwFWgeqtQKWpxmj2Q3OeXIKH8x1d1PH3z6bU0WKXPiOOLFPa0b2akmnG8YnHG6MqRjSveV2h9+wK4CI1CmmfkoiMgx35HKsqm4NDHoDOFlEavjTXx2xhhHLzba8/GjCAOgQd/dTJGE894cDY8ojyaV6VWsOxJiKLt0jjZdE5GGgoYj8AXcd4tFdmO/9uPdxvC8iAF+q6vmq+q2IvAR8hzttdZGq5u/CfEwJrNwQ+0T3zsBRReQWW4BD2jfh7yf3onGd6jSqXb3c4jPGhC/dVm7/IiKDgY1AZ+AmVU1860x60+uQYtjtuCZLTDl7+ovFRcpe+2YZx+3Xks8XrI0pH9kr4f0JzL5lKO/MXpnWq12NMdkn7XeE+yTxPoCI5IjIqar6bDGjmSzy+GcLi5Rd9uJ0hvfYi7OemALADSO6ppxG3RpV+e3+rfh8wc9cemTHMonTGBOelCehRaS+f0L7fhEZIs7FwI/ASeUToikPUxati3aP6NE8ZtgDH86Pdh8buLU2lb+e1IvcJrt06csYk4GKu3L5DO501CzgXOA94ERgpKqOTDWiyR55+QWc+FDh85P3ndyLe0/sGe3/RyBpNKtXs1xjM8ZkluJOT7VT1R4AIvIo8DOwt6puKvPITLnpect7Mf3Vcqrwm/1b8ee46xLXDOtSnmEZYzJQcUcaeZEOfxfTQksY2W/D1jyOvf9TflyzGVVl647CO6OuGNwp2t2+aezppfMPb1duMRpjMlNxRxo9RWSj7xaglu8XQFW1fplGZ3a7WUs3cMz9nwJw5L0fFxl+6cDCi9fvX354TEu1/vZoY0wllvJIQ1VzVLW+/6unqlUD3ZYwslAkYaSjShXhkdP2L8NojDHZJu1bbk12mbdyE80b1qR+Cd6WN+PmIUXKhuyzF29e3J/6teyrYoyxpJFVVJW2177NoK578ugZfRLWWbJuK2Pfmcu4mSsAmDtmGDWrFf9KklaNatGgVuIE06NVg9IHbYypUKyxoCzy6tfLAJgwZ1XSOofePTGaMKDwpUi/7sjnL+/Oi5Z/fePgmPHeuLj/7gzVGFNB2ZFGlvh58/aYW2A3b99J3Rqxq2/1pm3xowHw4EcLuOuduTFljetUL9KKrTHGFMeSRpb48sfYtp8iL0SKbPgveu7rmCOMoPiE8eqFh5RBhMaYysBOT2WJi5/7JmH5mk3bUdUiCWPU8OQP4vXeu9Fujc0YU3lY0shyB9w+gdvHzYkp69dhD84/vH3C+sU1OGiMMalY0sgCefkF0e75tw8vMvzRT2Nbp3323IMAYtqPijj3UHuq2xhTenZNIwtMWVjYAm3VnCq8csHB/ObBLxLWrV298PbaE3q35K2Zy7nuqK60a1qXnCr2RLcxZtfYkUaGmzh3NZc8765n9M1tDMD+bRonvPPpwVN7xzygJyI8cVZfOu5ZzxKGMWa3sKSRwb5ftYmznpzC2i07APhjXIOB++3dMKZ/eI/mVMuxVWqMKTu2hclQG7flMeRvn8SUbdq2M6b/5ANal2dIxhhjSSNTTZy7ukjZYZ2axvSf1KcwaVw+qFN8dWOM2e0saYTsvW9XsmTd1piyF776iSc/XxRT9valh9K4TvWYsmBT5b/t06rMYjTGmAi7eypEG7flcd4z04DCJ7u378xn1KuzitTt1iJxS/QTrxzAK9OW0ry+vYbVGFP27EgjJKrKvqNjX7M6ce5qOt/wTkzZHw9vl7KNqLZN6nDl0M5UsbujjDHlwI40ykFefgGCe8YC4JctO/huxcaYOrmjxiUc99rh9gS3MSZzWNIoYz+t3cph90wE4Ifbh9Px+vFpj5vopUjGGBMmSxpl7PXpy6Ldb88q2gpttRwhL19jyr68diB7NbBrFMaYzGPXNMpQXn4B977/fbT/Ty9ML1Lno6uOiOlfNHaEJQxjTMayI41d9MDE+dzz7jyuGtqZCw5vT5UqwvL1v3LI2A/TGr9lw1qc3a8tfds2Ylj35mUcrTHG7Bo70iilDVvzOPjOD7jHv0L1nnfn0e66t1FVhv99Uspxj+3ZAoBT+u4NwE3HdLOEYYzJCqKqxdfKcH369NGpU6eW6zyT3e2UyO3Hd+f6/82O9ttrVo0xmUBEpqlqn5KMY6enytBNR3ejUZ1qHL9fK778cR1vzlhOq0a1wg7LGGNKzZJGGTq7f9to932/68UfD2tH95YNQozIGGN2jV3TKIVTH/0ypv/u3+7L7FuGxpTNHTMspj+niljCMMZkPTvSKKG1m7fz2fy10f5Zo4dQr2Y1AAZ13ZMJc1Yx5rju1KyWk2wSxhiTtSxplND+t02Idi+446iYN+I9ekaJricZY0zWsdNTJbAtLz+m316haoypbCxppCkvv4AuN75TfEVjjKnALGmk6bnJP8
X0d22e+P0WxhhTkYWaNETkShFREWni+0VE/iEi80Vkpoj0DjO+oLkrN0W77/7Nvrx5cb8QozHGmHCEdiFcRFoDg4HgLvxwoKP/OxB40P8P3fNfFYZ50gGtU9Q0xpiKK8wjjb8BVwPBdkxGAk+r8yXQUEQyqlGmhXceFXYIxhgTmlCShogcCyxT1Rlxg1oCSwL9S31ZommcJyJTRWTqmjVryijShPMtt3kZY0ymKbPTUyIyAdgrwaDrgeuARK+lS7RFTtiioqo+AjwCrsHCUoaZltUbt5Xl5I0xJmuUWdJQ1UGJykWkB9AWmOH32lsBX4tIX9yRRfCCQStgeVnFmK6+d3wAQIdmdUOOxBhjwlXup6dUdZaqNlPVXFXNxSWK3qq6EngDON3fRXUQsEFVi74jNSQvnHdQ2CEYY0yoMq0ZkbeBo4D5wFbgrHDDidWkbo2wQzDGmFCFnjT80UakW4GLwoumqOCttsYYU9nZE+HFuPbVWWGHYIwxGcOSRpqePrtv2CEYY0zoLGmk6bBOTcMOwRhjQmdJI4UdOwvCDsEYYzKKJY0UOt0wPuwQjDEmo1jSSMMpffcOOwRjjMkIljTScMOIrmGHYIwxGcGSRhLrtuyIdtepEfrjLMYYkxEsaSQxblbGtF5ijDEZw5JGEje+NhuAs/rlhhuIMcZkEEsaxbjoiA5hh2CMMRnDkkYxrJFCY4wpZEkjgWXrfw07BGOMyUiWNBL4cM4qAHq1bhhyJMYYk1ksacT53zdLufH1bwEYM7J7yNEYY0xmsaQR5/IXZ0S77fWuxhgTy5JGCrWq54QdgjHGZBRLGgk0rlOdRWNHhB2GMcZkHEsaAW/NXA7ENiFijDGmkCWNgIuf+waACwa0DzkSY4zJTJY0EvjDoe3CDsEYYzKSJQ1vw9a8aHfjOtVDjMQYYzKXJQ2v563vhR2CMcZkPEsacZ4+u2/YIRhjTMaypBGnX4cmYYdgjDEZy5JGnJwqEnYIxhiTsSxpAFt37Aw7BGOMyQqWNIB+Yz8MOwRjjMkKljSAX/zttvu0qB9yJMYYk9ksaQS8csEhYYdgjDEZrdInjWe+XAxAneo51KxmrdoaY0wqlT5p3PjabAC27MgPORJjjMl8lTpp5OUXRLsvOsIaKTTGmOJU6qQxfcn6aPdVQ7uEGIkxxmSHSp00Js5dDcDNx3QLORJjjMkOVcMOIEwDuzZj47Y8TurTOuxQjDEmK1TqpLF/m8bs36Zx2GEYY0zWqNSnp4wxxpRMaElDRC4RkXki8q2I3B0ov1ZE5vthQ8OKzxhjTFGhnJ4SkSOAkcC+qrpdRJr58m7AycA+QAtggoh0UlV7iMIYYzJAWEcaFwBjVXU7gKqu9uUjgRdUdbuqLgTmA/ZWJGOMyRBhJY1OwKEiMllEPhaRA3x5S2BJoN5SX1aEiJwnIlNFZOqaNWvKOFxjjDFQhqenRGQCsFeCQdf7+TYCDgIOAF4SkXZAojcgaaLpq+ojwCMAffr0SVjHGGPM7lVmSUNVByUbJiIXAK+qqgJfiUgB0AR3ZBF8aKIVsLysYjTGGFMyYZ2eeg04EkBEOgHVgZ+BN4CTRaSGiLQFOgJfhRSjMcaYOOJ29st5piLVgceBXsAO4EpV/dAPux44G9gJXKaq49OY3hpgcTHVmuASU2VUWZfdlrtyseUuuTaq2rQkI4SSNMIgIlNVtU/YcYShsi67LXflYstdPuyJcGOMMWmzpGGMMSZtlSlpPBJ2ACGqrMtuy1252HKXg0pzTcMYY8yuq0xHGsYYY3aRJQ1jjDFpqxRJQ0SG+abW54vIqLDjSYeItBaRiSIyxzcf/ydf3lhE3heRH/z/Rr5cROQffhlnikjvwLTO8PV/EJEzAuX7i8gsP84/RERSzaOclz9HRL4Rkbd8f1vfVtkPIvKif9YH/yDoi34ZJotIbmAaCZvZT/Z9SDaP8iQiDUXkvyIy16/7gyvDOheRy/33fLaIPC8iNSviOheRx0VktYjMDpSFtn5TzSMpVa3Qf0AOsABoh3vyfAbQLey40oi7OdDbd9cDvge6AXcDo3z5KOAu330UMB7XftdBwGRf3hj40f9v5Lsb+WFfAQf7ccYDw315wnmU8/JfATwHvOX7XwJO9t0PARf47guBh3z3ycCLvrubX9c1gLb+O5CT6vuQbB7lvNxPAef67upAw4q+znGNki4EagXWw5kVcZ0DhwG9gdmBstDWb7J5pFyG8v5RlPef/wDfDfRfC1wbdlylWI7XgcHAPKC5L2sOzPPdDwOnBOrP88NPAR4OlD/sy5oDcwPl0XrJ5lGOy9oK+ADX1Mxb/gv9M1A1fp0C7wIH++6qvp7Er+dIvWTfh1TzKMflro/beEpceYVe5xS2bt3Yr8O3gKEVdZ0DucQmjdDWb7J5pIq/MpyeSru59UzlD7/3AyYDe6rqCgD/v5mvlmw5U5UvTVBOinmUl/uAq4EC378HsF5Vd/r+YKzR5fPDN/j6Jf08Us2jvLQD1gBPiDs196iI1KGCr3NVXQb8BfgJWIFbh9OoHOscwl2/Jd4+VoakkXZz65lIROoCr+Da4dqYqmqCMi1FeahE5GhgtapOCxYnqKrFDMvGz6Mq7tTFg6q6H7AFdyohmWxcxiL8+fWRuFNKLYA6wPAEVSviOk+lPJanxONUhqSRtc2ti0g1XMJ4VlVf9cWrRKS5H94ciLz1MNlypipvlaA81TzKQz/gWBFZBLyAO0V1H9BQRCJN+QdjjS6fH94AWEfJP4+fU8yjvCwFlqrqZN//X1wSqejrfBCwUFXXqGoe8CpwCJVjnUO467fE28fKkDSmAB39XRLVcRfO3gg5pmL5ux4eA+ao6l8Dg94AIndLnIG71hEpP93fDXEQsMEfhr4LDBGRRn6PbgjuvO0KYJOIHOTndXrctBLNo8yp6rWq2kpVc3Hr6kNVPRWYCPw2QUzBWH/r6yvJm9lP+H3w4ySbR7lQ1ZXAEhHp7IsGAt9Rwdc57rTUQSJS28cVWe4Kv869MNdvsnkkV14Xu8L8w90h8D3uDorrw44nzZj74w4TZwLT/d9RuPOwHwA/+P+NfX0BHvDLOAvoE5jW2bj3rc8HzgqU9wFm+3Hup7CFgITzCOEzGEDh3VPtcBuA+cDLQA1fXtP3z/fD2wXGv94v2zz8XSSpvg/J5lHOy9wLmOrX+2u4u2Mq/DoHbgHm+tiewd0BVeHWOfA87rpNHm4v/5ww12+qeST7s2ZEjDHGpK0ynJ4yxhizm1jSMMYYkzZLGsYYY9JmScMYY0zaLGkYY4xJmyUNk1FEREXk3kD/lSIyejdN+0kR+W3xNXd5PieKa6F2YqCsh4hM93/rRGSh754gIi1E5L9lGE8vETmqrKZvKhdLGibTbAdOEJEmYQcSJCI5Jah+DnChqh4RKVDVWaraS1V74R6ousr3D1LV5apalsmsF+45BWN2mSUNk2l24t55fHn8gPgjBRHZ7P8PEJGPReQlEfleRMaKyKki8pW4dwu0D0xmkIhM8vWO9uPniMg9IjLFv1Pgj4HpThSR53APPsXHc4qf/
mwRucuX3YR7MPMhEbknnQUWkVzx71cQkTNF5DURedMfjVwsIleIa8DwSxFp7Ou1F5F3RGSaX54uvvxEH88MEfnEPwF9K/A7f2TzOxGpI+69DlP8dEcG5v26n+48EbnZl9cRkXF+mrNF5HfpLJepmKoWX8WYcvcAMFNE7i7BOD2Brrg2iH4EHlXVvuJeXnUJcJmvlwscDrQHJopIB1xzCxtU9QARqQF8JiLv+fp9ge6qujA4MxFpAdwF7A/8ArwnIsep6q0iciRwpapOLfGSO91xrRrXxD3xe42q7icif/Ox3odLrOer6g8iciDwL1w7XTcBQ1V1mYg0VNUdPpH1UdWLfex34JreOFtEGgJficiE4PICW4EpIjIOaAMsV9URfvwGpVwuUwHYkYbJOOpa830auLQEo01R1RWquh3XJEJkoz8LlygiXlLVAlX9AZdcuuDa7jldRKbjmp/fA9duEcBX8QnDOwD4SF0jezuBZ3Ev2NkdJqrqJlVdg2v2+83gsohr+fgQ4GUf88O4dyQAfAY8KSJ/wL18KJEhwCg/7ke45LS3H/a+qq5V1V9xDQf29/MdJCJ3icihqrphNy2nyUJ2pGEy1X3A18ATgbKd+B0d3yBb8NWc2wPdBYH+AmK/5/Ht5kSalL5EVd8NDhCRAbjmyRNJ1KT07lLcslTBvQeiV/yIqnq+P/IYAUwXkSJ1cLH/RlXnxRS68Yp8Pqr6vYjsj7sucqeIvKeqt5ZmwUz2syMNk5FUdR3uVZznBIoX4U4HgXv/QrVSTPpEEanir3O0wzVs9y5wgbim6BGRTuJefpTKZOBwEWniL5KfAnxcinhKzB+JLRSREyH6nueevru9qk5W1ZtwTX+3BjbhXhkc8S5wiU+8iMh+gWGDxb1PuhZwHO5UXQtgq6r+B/eypOLfI20qLEsaJpPdCwTvovo3bkP9FXAgyY8CUpmH27iPx10T2AY8imuK+2t/QfphijkKV9d89LW4prVnAF+rank2q30qcI6IzAC+xSVRgHsiF+eBT3xsE4FukQvhwBhcwp3p640JTPdTXCuz04FX/HWZHrjrHtNxrcjeVvaLZzKVtXJrjAHc3VMELpgbk4gdaRhjjEmbHWkYY4xJmx1pGGOMSZslDWOMMWmzpGGMMSZtljSMMcakzZKGMcaYtP0/+F8S1ibtwOMAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nO3dd3wVZfb48c9JQm9SVaQEUERBQYwCVlRUiquuqz/F3taua9nVYMWOZe1+V9F1saKsuraoKAqiKGBQUFCadEGqNKkh5/fH8+Qyubk3/d7JTc779corM8+0M+XOmfqMqCrGGGMMQFrYARhjjKk6LCkYY4yJsKRgjDEmwpKCMcaYCEsKxhhjIiwpGGOMibCkUAlE5HwR+aqSxpUpIioiGZUxvkQRkY9E5Lyw46hKRKSeiLwvIutE5L9hx1MRIlJHRH4Skd3CjsWUTET2F5GvK2NcKZUUROQwEfna/+jWiMgEETkoyTGEutMWkQUi0i+MaQep6gBVfTER4xaRxiLymIgsEpGNIjLXt7dIxPQq0anArkBzVT0tuqOIDPXbzjVR5df68qFJirM0LgHGq+pvYQdSHBH5f36fsElExsXo3kNEpvjuU0SkR6CbiMgDIrLa/z0oIlKaYcsQX6UdMBZHVX8A1orInyo6rpRJCiLSGPgAeBJoBuwB3AlsDTOu6ijMsxQRqQ18BnQF+gONgUOA1cDB5RhfMuelPTBbVfOK6Wc2EH2Gda4vr0ouBV5O5gT9Trqs+6Q1wGPAsBjjqw28C7wCNAVeBN715eAS38lAd2B/4ATcfJdm2KroVXz8FaKqKfEHZAFri+l+PjABeBRYC8zD7UzOBxYDK4DzAv03AV4CVgILgVuBNN8tzbcv9MO9BDTx3RYBCmz0f338NL4CHgZ+B+YDA6Km9W9gGfArcA+Q7rul++FW+Ziv9OPPiDOfC4B+cbqdAEz18/81sH+gWzbwC7AB+An4c5xlt8bHV9I8jQMuDgxfXL8dgPF+2mOAp4FX4szDxcByoGEx61qBPQPtI4B7fHNfYAlwE/Abbsf2M3BCoP8Mv7x7+vbefnmtBaYBfYuZ9j5+3tcCM4ATffmdwDZgu98uLoox7FDcTuZnoKsv6+rbXwGGBvr9KzDXr4/3gNa+/Bng4ajxvgtc75tbA2/htuv5wDWB/g4GcoH1fhk/Emce2wGbCWyDQB2/fhf5YZ8B6vlu5V6+flnei9v+NgP/AKZExXMD8E4J+4eLgXFRZcfhfm8SKFsE9PfNXwOXBLpdBEwszbBx9j/zcNv4fOAsv61sAXb4bWJtKZZlX9z2e7NfhguAswLTGYj7/W7w8f090G0PvwzrlHX/WmheKjJwMv9wR4yrcRl7ANA0xkrJAy7A7Wjv8Qv9ab8SjvMLsqHv/yXcj6kRkIk7UrvId7sQ94PsCDQE3gZe9t0yidpp+2lvx/2Q04HLgaUFGxTwDvAs0ABoBUwGLvXdLgNmAm1xZ0Bjo8cfNZ8LiJEUgJ64BNbLx3Ce77eO734aboeRBpwO/AHsHrXsrsb9oOuVYp7GUTgpFNfvN7gfQW3gMNxOKV5SeB14sYRtoaSkkAc84Nd7PeB24NVA/4OAmYEf0mrcjy0NONa3t4wx3Vp+u7jZz8vRuG1qb999aLz5Cnb3wz/gyx4EhhBICn68q/w6rYM7Ox7vux2BO8gpWLZNcTuCgnU7xc9vbdz2Ow84PrAezvHNDYHeceIcBMyIKnsMl5ya4X4z7wP3+27lXr5+O1qES44Zfn7XAPsExvc98JcStolYSeE64KOosg+AG3zzOqBXoFsWsKE0w0aVN8Bt0wXbwe7sTPrnA1+VYVn2xW2/j/hlcSTut1ow7mXA4YF13zNq3OsJHAyWa19bkYGT/YfLvCNwmTTPL9hdAwt/TqDf/XA7j10DZauBHrgd11Zg30C3Sws2KtzliysC3fbG7fQyiJ8U5gba6/t+dsNdY96KPxLw3QcDY33z58BlgW7HRY8/ahksIHZS+Bdwd1TZLODIOOOZCpwUiH9RVPe48+Tbx1E4KcSb/3Z+XdUPdH+F+EnhU2BYCdtBSUlhG1A30H1P3M67vm9/FbjdN9+ET/iB/kcTOKsMlB+OO/tIC5SNZOfOfGi8+Qp298tkES7JLMIdEASTwr+BBwPDNfTbXyYgfpgjfLe/Ap/75l4x1uMQ4D++eTzujKZFCcv3LPwRs28X3I6pU6CsDzC/osvXb0d3xdiW7/XNXXFnn8Ue/RI7KdwGvB5V9mpgOe8AugS67eW3LSlp2KjyBrizoL8Q+J0HfhtfBdpLWpZ9cb+XBoHuo4DbfPMi3L6qcZzl8GvBtlHev5S5pwCgqj+r6vmq2gbohjs6eizQy/JA82Y/THRZQ6AF7khqYaDbQtxRDX680d0ycDv4eCI35FR1k29siLvOXAtYJiJrRWQt7qyhVWBai6OmVR7tgRsKpuGn09aPHxE5V0SmBrp1wy2HAouLjjLuPMUSr9/WwJpAWbxpFViNO9KqiJWquiUQz1zcJY4/iUh94ETgNd+5PXBa1HI7LE4MrYHFqpofKAtuN6WiqotwZxz34Q5kopdHoe1PVTfilsse6n75r+MOLADOxO2sCualddS83MzO7fYioDMwU0S+FZET4oT4O+4ItkBLXKKfEhjvx768MpZv9Py/CJzpb/qeA4xS1fLcO9yIu8IQ1BiXwGJ1bwxs9Mu4pGEjVPUP3Nn3ZbjfeY6IdIkTU7HL0vvdj7PAQvzvGJd4BgILReQLEekTNf5GuARVbimVFIJUdSbuCLFbOQZfhTvyah8oa4fLsuAufUR3y8MlHS3jtBbjzhRaqOou/q+xqnb13Zfhdt7BaZXHYtzR1S6Bv/qqOlJE2gPPAVfhnozZBZiOO2opUNb5Kq1lQDO/syjQNl7PuHsOx4tIg2L62YT7YRWIfmwy1ryMxO1ITwJ+8jsycMvt5ajl1kBVi9y4xG0XbaNuhga3m7J4CXet/KU404lsf35ZNA9MZyRwql+vvXD3EArmZX7UvDRS1YEAqjpHVQfjDkgeAN6Ms5x/ADoGbtKvwh1QdQ2Mt4mqBg8QKrJ8C60vVZ2IO9s7HJf0ynvDewawf/CJItwN5RmB7t0D3bpHdStu2EJUdbSqHotLdjNxvzcoui2WZlk2jVov7XDbBKr6raqehFuH7+DOIgAQkda4g91ZsWIsrZRJCiLSRURuEJE2vr0tbiOcWNZxqeoO3MK8V0Qa+R/X9bhTeHAb+HUi0kFEGuKO6N5Q91TJSiAfd722NNNaBnwC/NM/apkmIp1E5EjfyyjgGhFpIyJNcTeES1JLROoG/jJwG+FlItLLP8XRQEQGiUgj3Omt+tgRkQsoXzItM1VdiLu5OVREavsjm+Iem3sZtyN5y6/zNBFpLiI3i8hA389U3JFkuoj0x113LcnruEtzl7PzKBbcOv+TiBzvx1dXRPoWbGdRJuFO/W8UkVoi0tfPy+ulmH60N3w8o2J0ew24wD8SWQe3/U1S1QUAqvo9bl0+D4xW1
YIjw8nAehG5Sdw7E+ki0k38Y9sicraItPRnOgXD7IieuKouAebgn/by/T8HPCoirfy49hCR4wODVcbyDXoJeArIU9W4j3QWjBN3Jp/mx1/Ldx7n5+8ace9dXOXLPw9M43o/L61xSXpEKYcNxrCriJzod+RbcWcZBct1OdCm4KmlUi5LgDv97+Vw3AMk//XtZ4lIE1Xdjrt/EFx/fXGXEiv0RGbKJAXcaVsvYJKI/IFLBtNxK7I8rsb9wOfhnpx5DXjBd3sBt3Maj3uSYIvvv+DSyL3ABH/617sU0zoXl8F/wp2av8nO0+fncNdYpwHf4W5ql+RD3NFGwd9QVc3FXV9+yk9jLu56Jqr6E/BP3I3G5bj7LRNKMZ3Kchbuuulq3AMAbxDnUWK/QffDHW19itvwJ+MudU3yvf0NtzNe68f9TkkB+OT8De6JtDcC5YtxR7c343a0i3FPwBT5bajqNtylkQG4I77/A871Z61loqqbVXWMqm6O0e0z3DXtt3BnWp2AM6J6G4lbTq8FhtuBWy49cNvtKlziaOJ76Q/MEJGNwOPAGcHLbFGexV26KXATbpuaKCLrcWd0ewemXeHlG+Vl3IFLSWcJ5+B+A//CnVlsxh+l+/V1Mu73txb3AMnJvrxgHt8HfsTtS3J8WWmGDUrD7YeW4m6SHwlc4bt9jju7+E1EVvmyYpcl7lLs7358r+LuORZsY+cAC/xwlwFnB4Y7C/ckU4UUPMFgTNKIyBu4p1PuCDsWE5s/Q/keOMbv8JM9/Xq4p+l6quqcZE8/LP7s8xV/37Qsw+0HDFfV6HsMZZZKZwomRYnIQf6SWZq/3HMSpTi6N+FR1a2qum8YCcG7HPi2JiWEilDVHysjIYC7DmdMou2GuyzWHPc48eX+urgxRYjIAtxDECeHHEqNZJePjDHGRNjlI2OMMREpcfmoRYsWmpmZGXYYxhiTUqZMmbJKVVuW3OdOKZEUMjMzyc3NDTsMY4xJKSJS5hoS7PKRMcaYCEsKxhhjIiwpGGOMibCkYIwxJsKSgjHGmAhLCsYYYyIsKRhjjImo1klh6drN3JvzEx9PD6tOL2OMSS0p8fJaeR0yzH0P47kv5/Pj0ONoVLdWCUMYY0zNVq3PFNo0rRdpHj5+XoiRGGNMaqjWSWHc3/tGmp/8fG78Ho0xxgDVPClkpKcx+54BYYdhjDEpo1onBYDaGTtncfbyDSFGYowxVV+1TwpBxz06PuwQjDGmSqsRSWHikGPCDsEYY1JCjUgKuzWpG3YIxhiTEmpEUgia+dv6sEMwxpgqq8YlhTvenRF2CMYYU2XVmKQw9fZjAdiSlx9yJMYYU3XVmKSwS/3aAExbvDbkSIwxpuqqMUnBGGNMyWpUUqiVLgAsWr0p5EiMMaZqqlFJ4Yq+ewIwf/UfIUdijDFVU41KCsfs0wqA4eN/CTkSY4ypmhKWFETkBRFZISLTA2U9RGSiiEwVkVwROThR04+lY8uGAEyYuxpVTeakjTEmJSTyTGEE0D+q7EHgTlXtAdzu25OmYZ2d3xS67o2pyZy0McakhIQlBVUdD6yJLgYa++YmwNJETb8k70wNbdLGGFNlJfuewrXAQyKyGHgYGBKvRxG5xF9iyl25cmWlBTD3Xvu+gjHGxJPspHA5cJ2qtgWuA/4dr0dVHa6qWaqa1bJly0oLICN95yz/sTWv0sZrjDHVQbKTwnnA2775v0BSbzQX6Lu3SzIPfjwzjMkbY0yVleyksBQ40jcfDcxJ8vQBuODQDgC8+M3CMCZvjDFVVkbJvZSPiIwE+gItRGQJcAfwV+BxEckAtgCXJGr6xTlirxaR5k3b8qhfO2GLwRhjUkrC9oaqOjhOpwMTNc3SEhH6dGzON/NWs+/to1kwbFDYIRljTJVQo95oDrr48A5hh2CMMVVOjU0Kx+yza9ghGGNMlVNjk0LQNvvwjjHGAJYUALjhv9PCDsEYY6qEGp0Unj83C4D3p1mVF8YYAzU8KRzVpVWkeUe+1ZpqjDE1Oimkp0mkudPNH4YYiTHGVA01OilEy9thN5yNMTVbjU8KwRfXHvpkVoiRGGNM+Gp8UgB4+LTuADz7xbyQIzHGmHBZUgCO3Xfni2z5dsPZGFODWVIAmtSrFWnuaDecjTE1mCUFY4wxEZYUvNxb+4UdgjHGhM6SgteiYZ1I8zUjv7eX2YwxNZIlhRjem7aUpWs3hx2GMcYknSWFOMbPWRl2CMYYk3SWFAIm3XxMpPmW/00PMRJjjAmHJYWAXRvX5YZjO4cdhjHGhMaSQpSrj9kr0vzSNwtCi8MYY8JgSaEYt787I+wQjDEmqSwpxDD33gGR5szsnBAjMcaY5LKkEENGeuHF8t2i30OKxBhjkithSUFEXhCRFSIyPar8ahGZJSIzROTBRE2/orq33SXSfMr/fc3jY+aEGI0xxiRHIs8URgD9gwUichRwErC/qnYFHk7g9Cvk3SsPpVHdjEj7o2NmhxiNMcYkR8KSgqqOB9ZEFV8ODFPVrb6fFYmafmV498pDC7VnZudw9D/HoWpVYBhjqqdk31PoDBwuIpNE5AsROShejyJyiYjkikjuypXhvF3csWXDImXzVv5BhyEf2g1oY0y1lOykkAE0BXoD/wBGiYjE6lFVh6tqlqpmtWzZMpkxFhL8XGe0zOwcLn4xN4nRGGNMYiU7KSwB3lZnMpAPtEhyDGU2//6BcbuN+Xl5EiMxxpjEyii5l0r1DnA0ME5EOgO1gVVJjqHMRCRyxrB8/RZ63fdZoe7Tf11Htz2ahBGaMcZUqkQ+kjoS+AbYW0SWiMhFwAtAR/+Y6uvAeZpid213bVyXBcMG8eWNR0XKTnjyqxAjMsaYypOwMwVVHRyn09mJmmYytW1Wn/v+vB83/+9HAHbkKzvyldoZ9j6gMSZ12R6sAs7s1S7S3OnmD+l860chRmOMMRVnSaGC9tilXqF2e1TVGJPKLClUUJfdGhUpy8zOITM7h9wF0e/uGWNM1WZJoYKeOzcrbrdTn/kmiZEYY0zFWVKooLQ0Yc9WDem3TytaNKwds5+vf1llVWMYY1KCJYVKMOb6I3n+vIM49cC2Rbpd/GIuZz43ictemRJCZMYYUzaWFCpR9oAu1M5I46kzD4iUFbzxPHqGvflsjKn6LClUstn3DOCE/VvTukndIt02bs0LISJjjCk9SwoJMiH76CJl3e4YTWZ2jiUHY0yVZUkhQUSES47o6JsLd+t2x2jy89VuPhtjqhxJhR1TVlaW5uamZhXVi9dsYtfGdeO+7fzljUfRtln9JEdljKkJRGSKqsZ/bj4GO1NIsLbN6lM7I43Z9wzgmyFFLykd/uBYtu/IDyEyY4wpypJCktTOSGP3JvU4PavoY6t73fIRxz7yRQhRGWNMYZYUkuy2P+1L3VpFF/ucFRtDiMYYYwqzpJBkDetkMPPuAbx5WZ8i3bZs3xFCRMYYs5MlhZBkZTYr8pnPe3J+CikaY4xxLCmESEQYdWkf2jR11W+/MnFRyBEZY2o6SwohO7hDM8b9vW+kPfoR4cVrNjFt8dokR2WMqaksKVQB
Gek7V8O5L0yONKsqhz84lpOenhBGWMaYGsiSQhXRp2NzAL6cs4o/tubxw5K1fDV3VaT7r2s3hxWaMaYGyQg7AOMM3H93vpm3GoCud4wu0v3QYZ+zYNigZIdljKlh7Eyhijind/uwQzDGGEsKVUmtdCm5pyjnvjCZzOycBERjjKmJEpYUROQFEVkhItNjdPu7iKiItEjU9FPR5Jv7FSm76LAOkebM7Bx25O98Omnd5u2Mn70y0u3dqb8mPkhjTLWWsFpSReQIYCPwkqp2C5S3BZ4HugAHquqqOKOISOVaUstr49Y8duQrTerV4uiHxzFv1R8ApAnMu9/dW4h1hpBzzWF0bd0kqbEaY6qmKlVLqqqOB9bE6PQocCNQ9evsDlHDOhk0qVcLgPtP2S9Snq+wauPWuMMNeuKrhMdmjKm+knpPQUROBH5V1WnJnG6qO7hDs0Ltj3w6u9j+N2+zOpSMMeWTtKQgIvWBW4DbS9n/JSKSKyK5K1euTGxwVZyIFHoc9bVJiwpdOlowbBAj/9o70r7P7R8z6tvFSY3RGFM9JPNMoRPQAZgmIguANsB3IrJbrJ5VdbiqZqlqVsuWLZMYZtX10Kn7FynbrXFdAPp0al6oSu4b3/qBzOwcMrNzuPb175MWozEmtZUqKYhIAxFJ882dReREEalVlgmp6o+q2kpVM1U1E1gC9FTV38ocdQ116oFtipTd++fIPXymDz0+5nDvTF1Kfr7dwjHGlKy0ZwrjgboisgfwGXABMKK4AURkJPANsLeILBGRiyoSqHGXkaIds8+ukeZgHUrRrJoMY0xplDYpiKpuAk4BnlTVPwP7FjeAqg5W1d1VtZaqtlHVf0d1zyzN46imsPn3D2Tm3f0BeO3iXkW6/+P4vWMOd/iDYxMalzGmeiht3UciIn2As4CCI36rNykEIkLdWulx60G68qg9uaJvJ76cs4pD92zBP/47jbe/dy+1zV6+gc67NkpmuMaYFFPaM4VrgSHA/1R1hoh0BOzQs4oSEY7o3JL0NOGR03tEyo97dHyIURljUkGpjvZV9QvgCxFp4NvnAdckMjCTGJu37aBe7fSwwzDGVFGlffqoj4j8BPzs27uLyP8lNDKTEPvc/nHYIRhjqrDSXj56DDgeWA3g30g+IlFBmcr11uV9wg7BGJMiSv3ymqpGvyJrdSmkiAPbN2PGnbHfYTDGmKDSJoXFInIIoCJSW0T+jr+UZFJDgzo7bx+NnbWCf437JdK+eM0me7nNGAOU/rHSy4DHgT1wbyJ/AlyZqKBMYl3wn28BaNagFje99SMA1/bbi2v7dQ4zLGNMFVCWl9fOUtVdfVUVZ6vq6oRGZhKuICEAPDZmToiRGGOqitImha9F5BMRuUhEdkloRMYYY0JTqqSgqnsBtwJdcTWbfiAiZyc0MlPpvrzxqGK7b9luzw4YU9OV5emjyap6PXAw7otqLyYsKpMQbZvV54yD2hYq++W+gZHmLrfZOwzG1HSlutEsIo2BPwNn4L6L8D9ccjAp5v5T9uP9aUv5+NojaNusftjhGGOqmNKeKUwDegB3qWpnVb1JVackMC6TICLCjLv6F0oIU27tF2kelbsY1eIfT1VVZi/fkLAYjTHhKe0jqR1VVUWkkYg0VNWNCY3KJFXzhnVITxN25Cs3vvkDt787nS3b87n75G6c07t9pD9V5fpR0/ifr3X1npO7cXaguzEm9ZX2TKGriHwPTAd+EpEpItKtpIFM6vj2lp1nC1u25wNw2zvTueq17yLlHYZ8GEkIALe+Mz15ARpjkqK0SWE4cL2qtlfVdsANvsxUE80a1I5Z/sEPy1i3aTtD3v4xZndjTPVS2qTQQFUj309Q1XFAg4REZEJz10ldY5Z3v+sTRk5eFLPbuS9M5pFPZiUyLGNMEpU2KcwTkdtEJNP/3QrMT2RgJvnO7ZNZYj+nHtiGp8/syX57NAFg/OyVPPH53ARHZoxJltImhQuBlsDbuMdRWwIXJCooE57R1x7Bn7q35pmzD6TfPq2KdH/4tO4M2n93fvx1XaHyt79bkqwQjTEJVNovr/2OfWmtRth7t0Y8OfgAAPp3243Tnvmabxf8XuJw14+axik92yQ6PGNMghV7piAi7xX3l6wgTXiuOnqvSPOjp3ePNAffhC6QmZ3D81/OS0pcxpjEkOJeVBKRlcBiYCQwCZBgd//t5oTLysrS3NzcZEzKRFFVHv10Nif22IM9WzUs1G1bXj7L1m3myIfGFSpfMGxQEiM0xsQjIlNUNassw5R0+Wg34FhgMHAmkAOMVNUZ5QvRpBoR4frj9o7ZrXZGGu2b20NoxlQnxV4+UtUdqvqxqp4H9AbmAuNE5OqSRiwiL4jIChGZHih7SERmisgPIvI/q4a7eujfdbewQzDGVJISnz4SkToicgrwCu5ra0/gnkIqyQigf1TZp0A3Vd0fmA0MKVO0pkp65pwDWTBsEJce0bFItyMfGktmdg4zf1sfQmTGmLIq6Ubzi8DXQE/gTlU9SFXvVtVfixsOQFXH46rYDpZ9oqp5vnUiYI+rVCPPjnc3mSfN2/lRvoWrNwHQ/7EvefTT2aHEZYwpvZLOFM4BOgN/w319bb3/2yAiFT30uxD4KF5HEblERHJFJHflypUVnJRJptOHT2RHvrJ9R36h8sc/m0N+fvE1sBpjwlXSPYU0VW3k/xoH/hqpauPyTlREbgHygFeLmfZwVc1S1ayWLVuWd1Imie48cWc1GZ1u/pC9bima87Pf/iGZIRljyqjUX16rLCJyHnACcJaWVHG/SSln9WoXs/ym/l0izaNyd775/NPS9WRm55CZnZPw2IwxpZPUpCAi/YGbgBNVdVMyp20SLyM9jXkxXmq7vG8nJmQfXagsMzuHgU98GWl/b9pSMrNzGDtzRcLjNMbEl7CkICIjgW+AvUVkiYhcBDwFNAI+FZGpIvJMoqZvwpGWJoW+5FagdZO6kebpUfUmAVwz8nsALhjxLdvy8nl54kK7/2BMCEr75bUyU9XBMYr/najpmaqjecM6RcpEdr4Mf8KTXxU7fOdb3b2IOcs3cNdJ9i0nY5Ip6fcUTM1wSKfmFR7HS98srIRIjDFlYUnBJMTwc111K69d3CtuP9PvPJ6f74p+v7GwdZu2V2pcxpjiWVIwCdGwTgYLhg3ikD1bRMpGXdon0vy/Kw6hYZ0M6tVOZ/qdxzNwv90Y/4+jiozn619WJSVeY4yTsHsKxkQL1rJa8OU2cAnk/846ENhZw2rBY6prNm1LYoTGGDtTMEnTrEFtnjm7J9kDupCRXvym98zZPQH48MdlyQjNGOPZmYJJqv7ddi9VfwdlNgNgwtzVJfRpjKlMdqZgqqTgY632xrMxyWNJwRhjTIQlBVNlvXThwZFmqybLmOSwpGCqrCM676wdN3fh7yFGYkzNYUnBVGkN67hnIc58bmLIkRhTM1hSMFXauX3aA7B9h7Jus73dbEyiWVIwVdqNgW8xdL/zE775xR5RNSaRLCmYlDL4uYksXmOf4jAmUSwpmCrvhmM7F2o//MGxIUViTPVnScFUeVcfsxd
PDj6gUFlmdg6vTrKqtY2pbJYUTEr4U/fWjLn+iEJlt/xvekjRGFN9WVIwKWPPVo3CDsGYas+Sgkkp8+8fyM0Du5TcozGmXCwpmJQiIlxyRKewwzCm2rKkYFJaZnYOPy9bH3YYxlQblhRMyrtwxLdhh2BMtWFJwaSk4JNIy9ZtCTESY6qXhCUFEXlBRFaIyPRAWTMR+VRE5vj/TRM1fVO97dmqEa9f0jvsMIypdhJ5pjAC6B9Vlg18pqp7AZ/5dmPKpXfH5pHm/Hz73oIxlSFhSUFVxwNroopPAl70zS8CJydq+qZm6Xjzh2GHYEy1kOx7Cruq6jIA/79VvB5F5BIRyRWR3JUrVyYtQJO6tuXlhx2CMSmvyt5oVtXhqpqlqlktW7YseQBTI719xSGR5oPuHRNiJMZUD8lOCstFZHcA/39Fkqdvqpme7Zryz9O6A7Bu83bufH9GyBEZk9qSnZJzkHkAABSxSURBVBTeA87zzecB7yZ5+qYa+suBbSLN/5mwILxAjKkGEvlI6kjgG2BvEVkiIhcBw4BjRWQOcKxvN6bCRHY2Z2bn8PTYueEFY0wKE9Wq/yhfVlaW5ubmhh2GqeIys3OKlM2483ga1MkIIRpjwiciU1Q1qyzDVNkbzcaU1YJhg4qUdb1jNBu2bA8hGmNSkyUFU638OPS4ImWXvjwlhEiMSU2WFEy10qhuLWbe3Z/aGTs37a9/WR1iRMakFksKptqpWyud2fcMKFSWCvfOjKkKLCmYauunu46PNI/4ekF4gRiTQiwpmGqrfu2dTx3d+f5P/LZuC5Pm2aUkY4pjScFUa6Mu7RNp7n3/Z5w+fCJb83aEGJExVZslBVOtHdyhWZGyvW/9OIRIjEkNlhSMMcZEWFIw1d5LFx4cdgjGpAxLCqbaO6JzSxYMG1Tojefl6+27zsbEYknB1ChpvuK8TdvsZrMxsVhSMDXK2b3bA3D7u9NDjsSYqsmSgqlR9tujCQBfzlkVciTGVE2WFEyNcmKP1mGHYEyVZknB1Ch1MtILtW/cmsf705aGFI0xVY99fcTUaN3uGA1A84a1OaRTi5CjMSZ8dqZgaqz7Pvw50vzCVwvCC8SYKsSSgqlx2jarB8Dw8fMiZWN+Xh5WOMZUKZYUTI2zeM3mmOU/LV0faVZVpixcY99hMDWOJQVT4wwZ0CVm+cAnviQzOweAt7/7lb/86xuufO07Xpm4kLwd+ckM0ZjQSCocCWVlZWlubm7YYZhqYke+0unmDwGYc+8A9rrloxKHOaJzS6tDyaQcEZmiqlllGcbOFEyNk54mvH/VYfww9Dhqpadx66B9Shxm/OyVTFn4exKiMyZcoZwpiMh1wMWAAj8CF6hq3BrK7EzBJMOqjVvJumdMsf0EK9UzpqpLiTMFEdkDuAbIUtVuQDpwRrLjMCZai4Z1iuz0O7VsUKg91kHUpHmreXniwoTGZkyyhHX5KAOoJyIZQH3AXik1VcbjZ/QA4N/nZfHZDX0Ldet6x2jmrthQqOz04RO57Z3p/LE1j9cmLaLHXZ/w1OdzkhWuMZUqrMtHfwPuBTYDn6jqWTH6uQS4BKBdu3YHLlxoR2ImHEPfm8GIrxcUKis4o9i0LY99bx8dc7g7T+zKeYdkxh1vwW9PRColTmOipcrlo6bASUAHoDXQQETOju5PVYerapaqZrVs2TLZYRoTMfTErvTuWPhbz5nZOUxZ+DuXvDQl7nB3vDeDxWs2xeyWmZ1DhyEf0mHIh6zcsLVS4zWmIsK4fNQPmK+qK1V1O/A2cEgIcRhTak+f2bNI2V/+9TVfzS2+Cu7DHxwLwOI1m1ixYQs78pX7A9VrABx0b/E3t6s6VWXDlu1hh2EqSRgV4i0CeotIfdzlo2MAe7TIVGnN/U3ogpfbop1xUFte/3YxALs2rsOZB7fn0TGzAeIOE6Sqpb6MNGPpOrq2blLKyBPv0GGfs3TdFl6/pDe9OzYPOxxTQUk/U1DVScCbwHe4x1HTgOHJjsOY8ph5d3++vPGoIuXD/rI/7191GADvXnkYf+u3V5nG+92itYXaN27NIzM7h+8XFX434pFPZjHoia94aPTMMkaeGEvXbmbpOvc0+RnDJ4YcjakM9kazMeUwcvIihrz9IxD/3YWzn58U9/LSo6d3588HtOGZL35h2Eczi4wneHYx+OB2TJi7ij+25rH6j22R8ujpqiodhnxIswa1+e62YyNl+epe2KssBbHNu28gHf2b4QV+GHocjevWqrRpmYpJiRvNxlQHgw9ux093Hc+se/rH7WfEBQdRt5b7iWU2r8+UW/tFuv35gDYA9N1750MUC1b94fqNutw0cvIiFq3ZVCghgEs6Qa9OWgTAmj+28f60pSxdu5kOQz6k080f8uLXC9heCfU3bdm+I9J8wpNfFem+/9BP+HXtzgoH83bkk5mdU6pLaKZqsDMFY5Joa94OaqWlkeaP3Lfl5dP51pLrXornX2f1ZMB+uwOlu3cB7hJY3VruC3QjJsznuK670XoXV5349h35zF2xkX12b1xomOe/nMc9OT8XGVcs71x5KD3a7lIkpoIzm7e/W8L1o6bZ2+FJYGcKxlRxdTLSIwkBoHZGGjcc2zlmv08OPiDSHO8e9OWvflfmo/Aut33MsnWbyczOYej7P3HIsM/Jz1cys3PY65aPGPD4l/y8bD35+e6AcVtefokJ4cb+e0eaT356ArOXb+C8FyYX6qffI1+wLS+f60dNA+AxfyPeVC12pmBMyAruBQQ9MfgATuzemqHvzeDiwzvQpmn9SLeHR8/iqbFzC/XfZbdGzPyt8JvWBb666SgOe2Bs5QcesGDYICbNW83pZbjZfF6f9rz4zcLI8AU2b9tBvdrp8QaLKxH3T1Jdec4ULCkYUwUU/A47DPmQERccRN+9WxXbf96OfPaMU+X3w6d15+//dUfj8+4bWOjMpDKu7b9/1WG89d2SyFveH15zOPu2bhwzuZXWA3/Zj9MPalckvj4dm9OrYzMuPKxDkRvYJz09gWmL1/LtLf2oVzs98r3tB0/dnxvf/IFf7htYKEHEupQVNH/VH7z0zQLu+FPXmDEWXOq77YR9ueiwDuWaz+Lk52vkxv2XNx5F22b1SxiiZJYUjKlBbhg1jYMym5Ltn4ICmH3PAGpnxL8qfP5/JjNu1spI+ykH7MHb3/8aaX/8jB787fWpMYc9onNLenVoxpVH7Qns3MkGd7BXvvYdOT8si7SfsP/uPHVmz0pJRl/ddBSjZyzn7g9+4qDMpny7oOSqzBcMG8Ty9Vs48amvWL5+55vj0cvp+jemRpbDPSd34+ze7fnox2W89M1CMls04P5T9is0D/ES91dzVtF5t4a0alQ3UvbmlCUc2L4p/5kwn44tGnD+oR3Ysn0HXW77mMEHt+Oek7uRniZFllFl3HOxpGBMDVTSEXC8/oM7tuAO/umxc3lo9KxCw3x+w5F0bNmwVPEUDP/QqftzWlZbAH5du5lDh30OwKSbj6HXfZ+ValwVNfbvfTnq4XFFyi88tAMvTJhPVvumjLjw4MhZRjyvXtyLs6Ke9jqkU3
Ne+2tvAEbP+I1LXy5c5clHfzucAY9/WWRcn91wJMf884tCZbFejLSkUAxLCsbEV7AzmXbHcTSpV753BLbmuUdN62S4a/n5+UpamvDH1jzSRMp1jT8o+LW7gp1dwVvcyXxctUfbXZi6eG3JPZZSv31aMebnFWUapvOuDZm9fGOhslj3fabc2o/mDetUKD57+siYGmjm3f356qajyp0QwCWDgoQARO5DNKiTUeGEAO7m72t/7cW7Vx4aKYtVrUft9DQWDBsUSRzH7btr3CPm6Xcez4WH7ry2f2P/vSPDdtmtUcxhnj3nwLgxfnLdESXOx7z7BhZqL2tCAIokBCDmgwAHlvDBp0SxMwVjTKgeHzMnUk/U/PsHllgH1LpN28lIFxrUcVW3rd20jbq10iPvXhSId1lt2EczeeaLX4qMt6Cflycu5LZ3pvP/strQepd6PDZmTqF+gjeEY/n42sPp/9jOy0b3nNyND35YymsX9y403HtXHcqJT00oNOwbl/SmYd0MBj2x88XAMdcfyZ6tSnfpLppdPjLGpKTcBWvosntjGtapvDo6C5JC3VppzLx7QMx+fv9jG2s2bSOzeYO4j7Ku3LCVJvVqFbmBf9gDn7Pkd/f29isX9WL9lu30bNeU3ZrUZcX6LRx832d8fO3hdNlt54uAsRJVSWU92+3C21fsPMMqC0sKxhjjTZy3msHPTWT+/Yl7c3rO8g1s2JpHz3ZNS9V/fr6yafsOGtROj5wRbdm+g83bdtC0Qe1If8GkEP1YcVlYUjDGmGqg4BJVQcWJ5VWepBDG9xSMMcYUIy1NQqsbyp4+MsYYE2FJwRhjTIQlBWOMMRGWFIwxxkRYUjDGGBNhScEYY0yEJQVjjDERlhSMMcZEpMQbzSKyElhYQm8tgFVJCKeqsfmuWWy+a56KzHt7VW1ZlgFSIimUhojklvV17urA5rtmsfmueZI973b5yBhjTIQlBWOMMRHVKSkMDzuAkNh81yw23zVPUue92txTMMYYU3HV6UzBGGNMBVlSMMYYE1EtkoKI9BeRWSIyV0Syw46nNESkrYiMFZGfRWSGiPzNlzcTkU9FZI7/39SXi4g84efxBxHpGRjXeb7/OSJyXqD8QBH50Q/zhPjv/8WbRpLnP11EvheRD3x7BxGZ5GN6Q0Rq+/I6vn2u754ZGMcQXz5LRI4PlMfcHuJNI1lEZBcReVNEZvr13qcmrG8Ruc5v49NFZKSI1K2u61tEXhCRFSIyPVAW2joubhpxqWpK/wHpwC9AR6A2MA3YN+y4ShH37kBP39wImA3sCzwIZPvybOAB3zwQ+AgQoDcwyZc3A+b5/019c1PfbTLQxw/zETDAl8ecRpLn/3rgNeAD3z4KOMM3PwNc7puvAJ7xzWcAb/jmff26rgN08NtAenHbQ7xpJHGeXwQu9s21gV2q+/oG9gDmA/UC6+D86rq+gSOAnsD0QFlo6zjeNIqdh2T+KBK0EvoAowPtQ4AhYcdVjvl4FzgWmAXs7st2B2b55meBwYH+Z/nug4FnA+XP+rLdgZmB8kh/8aaRxHltA3wGHA184DfYVUBG9DoFRgN9fHOG70+i13NBf/G2h+KmkaR5bozbOUpUebVe37iksNjv4DL8+j6+Oq9vIJPCSSG0dRxvGsXFXx0uHxVsdAWW+LKU4U+RDwAmAbuq6jIA/7+V7y3efBZXviRGOcVMI1keA24E8n17c2Ctqub59mCskfnz3df5/su6PIqbRjJ0BFYC/xF32ex5EWlANV/fqvor8DCwCFiGW39TqP7rOyjMdVzm/WN1SAoSoyxlnrMVkYbAW8C1qrq+uF5jlGk5ykMlIicAK1R1SrA4Rq9aQrdUWx4ZuMsK/1LVA4A/cKf58aTa/MXkr22fhLvk0xpoAAyI0Wt1W9+lkYx5KvMw1SEpLAHaBtrbAEtDiqVMRKQWLiG8qqpv++LlIrK77747sMKXx5vP4srbxCgvbhrJcChwoogsAF7HXUJ6DNhFRDJixBqZP9+9CbCGsi+PVcVMIxmWAEtUdZJvfxOXJKr7+u4HzFfVlaq6HXgbOITqv76DwlzHZd4/Voek8C2wl3/SoDbu5tR7IcdUIv/UwL+Bn1X1kUCn94CCpw3Ow91rKCg/1z9N0BtY508TRwPHiUhTf1R2HO7a6TJgg4j09tM6N2pcsaaRcKo6RFXbqGombl19rqpnAWOBU2PEFIz1VN+/+vIz/NMqHYC9cDfhYm4Pfph400g4Vf0NWCwie/uiY4CfqObrG3fZqLeI1PdxFcx3tV7fUcJcx/GmEV8ybrwk4cbOQNzTO78At4QdTyljPgx3GvcDMNX/DcRdC/0MmOP/N/P9C/C0n8cfgazAuC4E5vq/CwLlWcB0P8xT7HyDPeY0QlgGfdn59FFH3I98LvBfoI4vr+vb5/ruHQPD3+LnbRb+KYzitod400ji/PYAcv06fwf3ZEm1X9/AncBMH9vLuCeIquX6Bkbi7p1sxx2lXxTmOi5uGvH+rJoLY4wxEdXh8pExxphKYknBGGNMhCUFY4wxEZYUjDHGRFhSMMYYE2FJwSSViKiI/DPQ/ncRGVpJ4x4hIqeW3GeFp3OauFpOxwbK9hORqf5vjYjM981jRKS1iLyZwHh6iMjARI3f1CyWFEyybQVOEZEWYQcSJCLpZej9IuAKVT2qoEBVf1TVHqraA/fC0D98ez9VXaqqiUxWPXDP6htTYZYUTLLl4b45e110h+gjfRHZ6P/3FZEvRGSUiMwWkWEicpaITBZXt3ynwGj6iciXvr8T/PDpIvKQiHzr65S/NDDesSLyGu7Fnuh4BvvxTxeRB3zZ7bgXD58RkYdKM8Mikim+fn0ROV9E3hGR9/3ZxFUicr24SvImikgz318nEflYRKb4+eniy0/z8UwTkfH+Ld67gNP9mcnpItJAXL3+3/rxnhSY9rt+vLNE5A5f3kBEcvw4p4vI6aWZL1M9ZZTcizGV7mngBxF5sAzDdAf2wdWDMw94XlUPFvdxoquBa31/mcCRQCdgrIjsiasOYJ2qHiQidYAJIvKJ7/9goJuqzg9OTERaAw8ABwK/A5+IyMmqepeIHA38XVVzyzznTjdcrbh1cW+s3qSqB4jIoz7Wx3CJ8zJVnSMivYD/w9UTdTtwvKr+KiK7qOo2n6iyVPUqH/t9uOohLhSRXYDJIjImOL/AJuBbEckB2gNLVXWQH75JOefLVAN2pmCSTl1tsC8B15RhsG9VdZmqbsW9sl+wU/8RlwgKjFLVfFWdg0seXXB1x5wrIlNx1ZM3x9WdAzA5OiF4BwHj1FXklge8ivuASmUYq6obVHUlrmro94PzIq7m3EOA//qYn8XVkQ8wARghIn/FfWAmluOAbD/sOFzyaee7faqqq1V1M65yusP8dPuJyAMicriqrquk+TQpyM4UTFgeA74D/hMoy8MfqPgKv4KfT9waaM4PtOdTeDuOrreloMrhq1V1dLCDiPTFVWEdS6wqhytLSfOShvsWQI/oAVX1Mn/mMAiYKiJF+sHF/hdVnVWo0A1XZPmo6mwRORB3X+J+EflEV
e8qz4yZ1GdnCiYUqroG97nEiwLFC3CXa8DVwV+rHKM+TUTS/H2GjrjK00YDl4urqhwR6SzuAzfFmQQcKSIt/E3owcAX5YinzPyZ1HwROQ0i39nt7ps7qeokVb0dVz10W2AD7pOuBUYDV/vEiogcEOh2rLjv+dYDTsZdSmsNbFLVV3AfxCn5O76m2rKkYML0TyD4FNJzuB3xZKAX8Y/iizMLt/P+CHdNfgvwPK665u/8Dd9nKeEsWV31wkNw1S9PA75T1WRWvXwWcJGITANm4JIkwEMFN7+B8T62scC+BTeagbtxCfUH39/dgfF+haupdCrwlr8vsh/uvsNUXE2k9yR+9kxVZbWkGlNDiMj5BG5IGxOLnSkYY4yJsDMFY4wxEXamYIwxJsKSgjHGmAhLCsYYYyIsKRhjjImwpGCMMSbi/wOW+zPefL+xZQAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"plot_results(log_dir, 1000)"
]
},
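{
"cell_type": "markdown",
"metadata": {},
"source": [
"The two curves above summarize the `Monitor` log for the 5x5 run: the smoothed mean reward climbs to roughly 26 while the mean moves per episode drop to about 7. The same smoothing can be recomputed directly from the log; the sketch below uses stable-baselines' result helpers (the `plot_results` called above is the custom one defined earlier in this notebook, and the window size here is an arbitrary choice)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: recompute the smoothed reward curve straight from the Monitor log.\n",
"import numpy as np\n",
"from stable_baselines.results_plotter import load_results, ts2xy\n",
"\n",
"x, y = ts2xy(load_results(log_dir), 'timesteps')  # cumulative steps vs. per-episode reward\n",
"window = 1000  # arbitrary smoothing window\n",
"y_smooth = np.convolve(y, np.ones(window) / window, mode='valid')\n",
"print('final smoothed reward: %.2f' % y_smooth[-1])"
]
},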
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Playing with One Ship on a Bigger Board"
]
},
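{
"cell_type": "markdown",
"metadata": {},
"source": [
"The run below repeats the single-cruiser experiment on a larger board. Only the board dimension changes; the snippet here illustrates that change (the `grid_size` value is an assumption for illustration, the actual value is set in the training cell that produced the log below)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustration only: the bigger-board run differs from the 5x5 one just in\n",
"# the board dimension (grid_size below is an assumed example value).\n",
"grid_size = 10  # assumption for illustration\n",
"env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size, ships=ships)"
]
},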
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"9995 timesteps\n",
"Best mean reward: -inf - Last mean reward per episode: -103.43 - Last mean moves per episode: 27.14\n",
"Saving new best model\n",
"19977 timesteps\n",
"Best mean reward: -103.43 - Last mean reward per episode: -102.78 - Last mean moves per episode: 27.13\n",
"Saving new best model\n",
"29985 timesteps\n",
"Best mean reward: -102.78 - Last mean reward per episode: -101.01 - Last mean moves per episode: 27.01\n",
"Saving new best model\n",
"39979 timesteps\n",
"Best mean reward: -101.01 - Last mean reward per episode: -97.92 - Last mean moves per episode: 26.83\n",
"Saving new best model\n",
"49997 timesteps\n",
"Best mean reward: -97.92 - Last mean reward per episode: -94.76 - Last mean moves per episode: 26.63\n",
"Saving new best model\n",
"59982 timesteps\n",
"Best mean reward: -94.76 - Last mean reward per episode: -92.51 - Last mean moves per episode: 26.57\n",
"Saving new best model\n",
"69981 timesteps\n",
"Best mean reward: -92.51 - Last mean reward per episode: -89.68 - Last mean moves per episode: 26.38\n",
"Saving new best model\n",
"79976 timesteps\n",
"Best mean reward: -89.68 - Last mean reward per episode: -87.69 - Last mean moves per episode: 26.25\n",
"Saving new best model\n",
"89986 timesteps\n",
"Best mean reward: -87.69 - Last mean reward per episode: -85.75 - Last mean moves per episode: 26.09\n",
"Saving new best model\n",
"99987 timesteps\n",
"Best mean reward: -85.75 - Last mean reward per episode: -84.27 - Last mean moves per episode: 25.96\n",
"Saving new best model\n",
"109994 timesteps\n",
"Best mean reward: -84.27 - Last mean reward per episode: -83.16 - Last mean moves per episode: 25.82\n",
"Saving new best model\n",
"119999 timesteps\n",
"Best mean reward: -83.16 - Last mean reward per episode: -81.99 - Last mean moves per episode: 25.68\n",
"Saving new best model\n",
"129974 timesteps\n",
"Best mean reward: -81.99 - Last mean reward per episode: -80.97 - Last mean moves per episode: 25.57\n",
"Saving new best model\n",
"139994 timesteps\n",
"Best mean reward: -80.97 - Last mean reward per episode: -79.73 - Last mean moves per episode: 25.43\n",
"Saving new best model\n",
"149972 timesteps\n",
"Best mean reward: -79.73 - Last mean reward per episode: -78.46 - Last mean moves per episode: 25.23\n",
"Saving new best model\n",
"159991 timesteps\n",
"Best mean reward: -78.46 - Last mean reward per episode: -77.81 - Last mean moves per episode: 25.10\n",
"Saving new best model\n",
"169981 timesteps\n",
"Best mean reward: -77.81 - Last mean reward per episode: -77.07 - Last mean moves per episode: 24.98\n",
"Saving new best model\n",
"179991 timesteps\n",
"Best mean reward: -77.07 - Last mean reward per episode: -76.90 - Last mean moves per episode: 24.94\n",
"Saving new best model\n",
"189991 timesteps\n",
"Best mean reward: -76.90 - Last mean reward per episode: -76.30 - Last mean moves per episode: 24.83\n",
"Saving new best model\n",
"199997 timesteps\n",
"Best mean reward: -76.30 - Last mean reward per episode: -75.49 - Last mean moves per episode: 24.68\n",
"Saving new best model\n",
"209986 timesteps\n",
"Best mean reward: -75.49 - Last mean reward per episode: -74.85 - Last mean moves per episode: 24.56\n",
"Saving new best model\n",
"219973 timesteps\n",
"Best mean reward: -74.85 - Last mean reward per episode: -74.48 - Last mean moves per episode: 24.46\n",
"Saving new best model\n",
"229988 timesteps\n",
"Best mean reward: -74.48 - Last mean reward per episode: -74.09 - Last mean moves per episode: 24.38\n",
"Saving new best model\n",
"239987 timesteps\n",
"Best mean reward: -74.09 - Last mean reward per episode: -73.62 - Last mean moves per episode: 24.28\n",
"Saving new best model\n",
"249999 timesteps\n",
"Best mean reward: -73.62 - Last mean reward per episode: -72.37 - Last mean moves per episode: 24.11\n",
"Saving new best model\n",
"259982 timesteps\n",
"Best mean reward: -72.37 - Last mean reward per episode: -71.07 - Last mean moves per episode: 23.95\n",
"Saving new best model\n",
"269984 timesteps\n",
"Best mean reward: -71.07 - Last mean reward per episode: -69.96 - Last mean moves per episode: 23.80\n",
"Saving new best model\n",
"279969 timesteps\n",
"Best mean reward: -69.96 - Last mean reward per episode: -69.34 - Last mean moves per episode: 23.68\n",
"Saving new best model\n",
"289995 timesteps\n",
"Best mean reward: -69.34 - Last mean reward per episode: -68.49 - Last mean moves per episode: 23.53\n",
"Saving new best model\n",
"299980 timesteps\n",
"Best mean reward: -68.49 - Last mean reward per episode: -67.81 - Last mean moves per episode: 23.39\n",
"Saving new best model\n",
"309978 timesteps\n",
"Best mean reward: -67.81 - Last mean reward per episode: -67.73 - Last mean moves per episode: 23.29\n",
"Saving new best model\n",
"319982 timesteps\n",
"Best mean reward: -67.73 - Last mean reward per episode: -67.67 - Last mean moves per episode: 23.22\n",
"Saving new best model\n",
"329998 timesteps\n",
"Best mean reward: -67.67 - Last mean reward per episode: -67.32 - Last mean moves per episode: 23.10\n",
"Saving new best model\n",
"339997 timesteps\n",
"Best mean reward: -67.32 - Last mean reward per episode: -67.06 - Last mean moves per episode: 23.02\n",
"Saving new best model\n",
"349993 timesteps\n",
"Best mean reward: -67.06 - Last mean reward per episode: -66.68 - Last mean moves per episode: 22.95\n",
"Saving new best model\n",
"359991 timesteps\n",
"Best mean reward: -66.68 - Last mean reward per episode: -66.61 - Last mean moves per episode: 22.90\n",
"Saving new best model\n",
"369988 timesteps\n",
"Best mean reward: -66.61 - Last mean reward per episode: -66.90 - Last mean moves per episode: 22.87\n",
"379997 timesteps\n",
"Best mean reward: -66.61 - Last mean reward per episode: -67.15 - Last mean moves per episode: 22.87\n",
"389981 timesteps\n",
"Best mean reward: -66.61 - Last mean reward per episode: -67.01 - Last mean moves per episode: 22.87\n",
"399978 timesteps\n",
"Best mean reward: -66.61 - Last mean reward per episode: -66.97 - Last mean moves per episode: 22.86\n",
"409981 timesteps\n",
"Best mean reward: -66.61 - Last mean reward per episode: -66.45 - Last mean moves per episode: 22.80\n",
"Saving new best model\n",
"419982 timesteps\n",
"Best mean reward: -66.45 - Last mean reward per episode: -66.40 - Last mean moves per episode: 22.82\n",
"Saving new best model\n",
"429993 timesteps\n",
"Best mean reward: -66.40 - Last mean reward per episode: -66.35 - Last mean moves per episode: 22.85\n",
"Saving new best model\n",
"439995 timesteps\n",
"Best mean reward: -66.35 - Last mean reward per episode: -66.29 - Last mean moves per episode: 22.87\n",
"Saving new best model\n",
"449996 timesteps\n",
"Best mean reward: -66.29 - Last mean reward per episode: -65.80 - Last mean moves per episode: 22.85\n",
"Saving new best model\n",
"459982 timesteps\n",
"Best mean reward: -65.80 - Last mean reward per episode: -65.50 - Last mean moves per episode: 22.84\n",
"Saving new best model\n",
"469974 timesteps\n",
"Best mean reward: -65.50 - Last mean reward per episode: -65.45 - Last mean moves per episode: 22.84\n",
"Saving new best model\n",
"479977 timesteps\n",
"Best mean reward: -65.45 - Last mean reward per episode: -65.05 - Last mean moves per episode: 22.84\n",
"Saving new best model\n",
"489998 timesteps\n",
"Best mean reward: -65.05 - Last mean reward per episode: -64.33 - Last mean moves per episode: 22.78\n",
"Saving new best model\n",
"499997 timesteps\n",
"Best mean reward: -64.33 - Last mean reward per episode: -63.77 - Last mean moves per episode: 22.74\n",
"Saving new best model\n",
"509982 timesteps\n",
"Best mean reward: -63.77 - Last mean reward per episode: -63.36 - Last mean moves per episode: 22.70\n",
"Saving new best model\n",
"519986 timesteps\n",
"Best mean reward: -63.36 - Last mean reward per episode: -63.02 - Last mean moves per episode: 22.68\n",
"Saving new best model\n",
"529988 timesteps\n",
"Best mean reward: -63.02 - Last mean reward per episode: -62.68 - Last mean moves per episode: 22.66\n",
"Saving new best model\n",
"539988 timesteps\n",
"Best mean reward: -62.68 - Last mean reward per episode: -62.34 - Last mean moves per episode: 22.66\n",
"Saving new best model\n",
"549970 timesteps\n",
"Best mean reward: -62.34 - Last mean reward per episode: -61.56 - Last mean moves per episode: 22.63\n",
"Saving new best model\n",
"559995 timesteps\n",
"Best mean reward: -61.56 - Last mean reward per episode: -61.38 - Last mean moves per episode: 22.64\n",
"Saving new best model\n",
"569992 timesteps\n",
"Best mean reward: -61.38 - Last mean reward per episode: -61.43 - Last mean moves per episode: 22.64\n",
"579991 timesteps\n",
"Best mean reward: -61.38 - Last mean reward per episode: -61.19 - Last mean moves per episode: 22.63\n",
"Saving new best model\n",
"589978 timesteps\n",
"Best mean reward: -61.19 - Last mean reward per episode: -61.01 - Last mean moves per episode: 22.66\n",
"Saving new best model\n",
"599981 timesteps\n",
"Best mean reward: -61.01 - Last mean reward per episode: -60.01 - Last mean moves per episode: 22.61\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"609983 timesteps\n",
"Best mean reward: -60.01 - Last mean reward per episode: -59.90 - Last mean moves per episode: 22.64\n",
"Saving new best model\n",
"619990 timesteps\n",
"Best mean reward: -59.90 - Last mean reward per episode: -59.27 - Last mean moves per episode: 22.57\n",
"Saving new best model\n",
"629988 timesteps\n",
"Best mean reward: -59.27 - Last mean reward per episode: -58.82 - Last mean moves per episode: 22.51\n",
"Saving new best model\n",
"639985 timesteps\n",
"Best mean reward: -58.82 - Last mean reward per episode: -58.70 - Last mean moves per episode: 22.48\n",
"Saving new best model\n",
"650000 timesteps\n",
"Best mean reward: -58.70 - Last mean reward per episode: -58.65 - Last mean moves per episode: 22.45\n",
"Saving new best model\n",
"659969 timesteps\n",
"Best mean reward: -58.65 - Last mean reward per episode: -58.28 - Last mean moves per episode: 22.45\n",
"Saving new best model\n",
"669995 timesteps\n",
"Best mean reward: -58.28 - Last mean reward per episode: -57.94 - Last mean moves per episode: 22.44\n",
"Saving new best model\n",
"679998 timesteps\n",
"Best mean reward: -57.94 - Last mean reward per episode: -57.73 - Last mean moves per episode: 22.45\n",
"Saving new best model\n",
"689971 timesteps\n",
"Best mean reward: -57.73 - Last mean reward per episode: -57.73 - Last mean moves per episode: 22.48\n",
"699998 timesteps\n",
"Best mean reward: -57.73 - Last mean reward per episode: -57.54 - Last mean moves per episode: 22.50\n",
"Saving new best model\n",
"709994 timesteps\n",
"Best mean reward: -57.54 - Last mean reward per episode: -57.05 - Last mean moves per episode: 22.44\n",
"Saving new best model\n",
"719996 timesteps\n",
"Best mean reward: -57.05 - Last mean reward per episode: -57.12 - Last mean moves per episode: 22.45\n",
"729996 timesteps\n",
"Best mean reward: -57.05 - Last mean reward per episode: -56.77 - Last mean moves per episode: 22.45\n",
"Saving new best model\n",
"739984 timesteps\n",
"Best mean reward: -56.77 - Last mean reward per episode: -56.59 - Last mean moves per episode: 22.46\n",
"Saving new best model\n",
"749982 timesteps\n",
"Best mean reward: -56.59 - Last mean reward per episode: -55.95 - Last mean moves per episode: 22.40\n",
"Saving new best model\n",
"759986 timesteps\n",
"Best mean reward: -55.95 - Last mean reward per episode: -55.64 - Last mean moves per episode: 22.40\n",
"Saving new best model\n",
"770000 timesteps\n",
"Best mean reward: -55.64 - Last mean reward per episode: -54.87 - Last mean moves per episode: 22.37\n",
"Saving new best model\n",
"779973 timesteps\n",
"Best mean reward: -54.87 - Last mean reward per episode: -54.60 - Last mean moves per episode: 22.32\n",
"Saving new best model\n",
"789974 timesteps\n",
"Best mean reward: -54.60 - Last mean reward per episode: -54.07 - Last mean moves per episode: 22.31\n",
"Saving new best model\n",
"799972 timesteps\n",
"Best mean reward: -54.07 - Last mean reward per episode: -53.31 - Last mean moves per episode: 22.27\n",
"Saving new best model\n",
"809974 timesteps\n",
"Best mean reward: -53.31 - Last mean reward per episode: -52.76 - Last mean moves per episode: 22.22\n",
"Saving new best model\n",
"819987 timesteps\n",
"Best mean reward: -52.76 - Last mean reward per episode: -52.52 - Last mean moves per episode: 22.22\n",
"Saving new best model\n",
"829980 timesteps\n",
"Best mean reward: -52.52 - Last mean reward per episode: -52.31 - Last mean moves per episode: 22.24\n",
"Saving new best model\n",
"839988 timesteps\n",
"Best mean reward: -52.31 - Last mean reward per episode: -51.79 - Last mean moves per episode: 22.24\n",
"Saving new best model\n",
"849977 timesteps\n",
"Best mean reward: -51.79 - Last mean reward per episode: -52.17 - Last mean moves per episode: 22.33\n",
"859993 timesteps\n",
"Best mean reward: -51.79 - Last mean reward per episode: -50.95 - Last mean moves per episode: 22.26\n",
"Saving new best model\n",
"869982 timesteps\n",
"Best mean reward: -50.95 - Last mean reward per episode: -50.38 - Last mean moves per episode: 22.25\n",
"Saving new best model\n",
"879996 timesteps\n",
"Best mean reward: -50.38 - Last mean reward per episode: -49.62 - Last mean moves per episode: 22.20\n",
"Saving new best model\n",
"889991 timesteps\n",
"Best mean reward: -49.62 - Last mean reward per episode: -49.72 - Last mean moves per episode: 22.23\n",
"899986 timesteps\n",
"Best mean reward: -49.62 - Last mean reward per episode: -49.42 - Last mean moves per episode: 22.22\n",
"Saving new best model\n",
"909995 timesteps\n",
"Best mean reward: -49.42 - Last mean reward per episode: -48.53 - Last mean moves per episode: 22.17\n",
"Saving new best model\n",
"919999 timesteps\n",
"Best mean reward: -48.53 - Last mean reward per episode: -47.84 - Last mean moves per episode: 22.11\n",
"Saving new best model\n",
"929984 timesteps\n",
"Best mean reward: -47.84 - Last mean reward per episode: -47.19 - Last mean moves per episode: 22.10\n",
"Saving new best model\n",
"939999 timesteps\n",
"Best mean reward: -47.19 - Last mean reward per episode: -46.65 - Last mean moves per episode: 22.09\n",
"Saving new best model\n",
"949986 timesteps\n",
"Best mean reward: -46.65 - Last mean reward per episode: -46.05 - Last mean moves per episode: 22.05\n",
"Saving new best model\n",
"959977 timesteps\n",
"Best mean reward: -46.05 - Last mean reward per episode: -45.21 - Last mean moves per episode: 22.02\n",
"Saving new best model\n",
"969999 timesteps\n",
"Best mean reward: -45.21 - Last mean reward per episode: -44.90 - Last mean moves per episode: 22.00\n",
"Saving new best model\n",
"979999 timesteps\n",
"Best mean reward: -44.90 - Last mean reward per episode: -44.32 - Last mean moves per episode: 21.95\n",
"Saving new best model\n",
"989995 timesteps\n",
"Best mean reward: -44.32 - Last mean reward per episode: -44.12 - Last mean moves per episode: 21.96\n",
"Saving new best model\n",
"999986 timesteps\n",
"Best mean reward: -44.12 - Last mean reward per episode: -43.76 - Last mean moves per episode: 21.99\n",
"Saving new best model\n",
"1009987 timesteps\n",
"Best mean reward: -43.76 - Last mean reward per episode: -43.26 - Last mean moves per episode: 21.96\n",
"Saving new best model\n",
"1019993 timesteps\n",
"Best mean reward: -43.26 - Last mean reward per episode: -42.69 - Last mean moves per episode: 21.92\n",
"Saving new best model\n",
"1029996 timesteps\n",
"Best mean reward: -42.69 - Last mean reward per episode: -41.97 - Last mean moves per episode: 21.86\n",
"Saving new best model\n",
"1039990 timesteps\n",
"Best mean reward: -41.97 - Last mean reward per episode: -40.98 - Last mean moves per episode: 21.75\n",
"Saving new best model\n",
"1049988 timesteps\n",
"Best mean reward: -40.98 - Last mean reward per episode: -40.19 - Last mean moves per episode: 21.65\n",
"Saving new best model\n",
"1059987 timesteps\n",
"Best mean reward: -40.19 - Last mean reward per episode: -39.28 - Last mean moves per episode: 21.58\n",
"Saving new best model\n",
"1069997 timesteps\n",
"Best mean reward: -39.28 - Last mean reward per episode: -38.45 - Last mean moves per episode: 21.48\n",
"Saving new best model\n",
"1079989 timesteps\n",
"Best mean reward: -38.45 - Last mean reward per episode: -38.14 - Last mean moves per episode: 21.45\n",
"Saving new best model\n",
"1089969 timesteps\n",
"Best mean reward: -38.14 - Last mean reward per episode: -37.69 - Last mean moves per episode: 21.40\n",
"Saving new best model\n",
"1099991 timesteps\n",
"Best mean reward: -37.69 - Last mean reward per episode: -37.41 - Last mean moves per episode: 21.35\n",
"Saving new best model\n",
"1109996 timesteps\n",
"Best mean reward: -37.41 - Last mean reward per episode: -36.95 - Last mean moves per episode: 21.28\n",
"Saving new best model\n",
"1119992 timesteps\n",
"Best mean reward: -36.95 - Last mean reward per episode: -36.35 - Last mean moves per episode: 21.21\n",
"Saving new best model\n",
"1129986 timesteps\n",
"Best mean reward: -36.35 - Last mean reward per episode: -35.38 - Last mean moves per episode: 21.10\n",
"Saving new best model\n",
"1139987 timesteps\n",
"Best mean reward: -35.38 - Last mean reward per episode: -35.54 - Last mean moves per episode: 21.10\n",
"1149984 timesteps\n",
"Best mean reward: -35.38 - Last mean reward per episode: -35.07 - Last mean moves per episode: 21.02\n",
"Saving new best model\n",
"1159993 timesteps\n",
"Best mean reward: -35.07 - Last mean reward per episode: -34.78 - Last mean moves per episode: 20.97\n",
"Saving new best model\n",
"1169983 timesteps\n",
"Best mean reward: -34.78 - Last mean reward per episode: -34.31 - Last mean moves per episode: 20.89\n",
"Saving new best model\n",
"1179987 timesteps\n",
"Best mean reward: -34.31 - Last mean reward per episode: -34.09 - Last mean moves per episode: 20.86\n",
"Saving new best model\n",
"1189995 timesteps\n",
"Best mean reward: -34.09 - Last mean reward per episode: -34.35 - Last mean moves per episode: 20.83\n",
"1199996 timesteps\n",
"Best mean reward: -34.09 - Last mean reward per episode: -33.12 - Last mean moves per episode: 20.64\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"1209997 timesteps\n",
"Best mean reward: -33.12 - Last mean reward per episode: -32.45 - Last mean moves per episode: 20.54\n",
"Saving new best model\n",
"1219982 timesteps\n",
"Best mean reward: -32.45 - Last mean reward per episode: -31.67 - Last mean moves per episode: 20.41\n",
"Saving new best model\n",
"1229986 timesteps\n",
"Best mean reward: -31.67 - Last mean reward per episode: -31.61 - Last mean moves per episode: 20.38\n",
"Saving new best model\n",
"1239987 timesteps\n",
"Best mean reward: -31.61 - Last mean reward per episode: -31.03 - Last mean moves per episode: 20.28\n",
"Saving new best model\n",
"1249987 timesteps\n",
"Best mean reward: -31.03 - Last mean reward per episode: -30.58 - Last mean moves per episode: 20.18\n",
"Saving new best model\n",
"1259993 timesteps\n",
"Best mean reward: -30.58 - Last mean reward per episode: -30.15 - Last mean moves per episode: 20.07\n",
"Saving new best model\n",
"1269999 timesteps\n",
"Best mean reward: -30.15 - Last mean reward per episode: -29.86 - Last mean moves per episode: 19.99\n",
"Saving new best model\n",
"1279978 timesteps\n",
"Best mean reward: -29.86 - Last mean reward per episode: -29.30 - Last mean moves per episode: 19.91\n",
"Saving new best model\n",
"1289999 timesteps\n",
"Best mean reward: -29.30 - Last mean reward per episode: -29.38 - Last mean moves per episode: 19.85\n",
"1299975 timesteps\n",
"Best mean reward: -29.30 - Last mean reward per episode: -29.58 - Last mean moves per episode: 19.82\n",
"1309981 timesteps\n",
"Best mean reward: -29.30 - Last mean reward per episode: -29.27 - Last mean moves per episode: 19.73\n",
"Saving new best model\n",
"1319997 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.11 - Last mean moves per episode: 19.73\n",
"1329998 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.51 - Last mean moves per episode: 19.72\n",
"1339989 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.54 - Last mean moves per episode: 19.66\n",
"1349993 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.66 - Last mean moves per episode: 19.59\n",
"1359998 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -31.14 - Last mean moves per episode: 19.59\n",
"1369999 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -31.52 - Last mean moves per episode: 19.57\n",
"1379983 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.43 - Last mean moves per episode: 19.47\n",
"1389994 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.40 - Last mean moves per episode: 19.46\n",
"1400000 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.93 - Last mean moves per episode: 19.46\n",
"1410000 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -31.39 - Last mean moves per episode: 19.47\n",
"1419992 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -31.39 - Last mean moves per episode: 19.43\n",
"1429996 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -31.85 - Last mean moves per episode: 19.48\n",
"1439990 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -32.04 - Last mean moves per episode: 19.49\n",
"1449994 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -32.73 - Last mean moves per episode: 19.58\n",
"1459985 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -32.92 - Last mean moves per episode: 19.64\n",
"1469976 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -33.26 - Last mean moves per episode: 19.67\n",
"1479997 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -33.41 - Last mean moves per episode: 19.65\n",
"1489981 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -33.91 - Last mean moves per episode: 19.70\n",
"1499988 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -33.51 - Last mean moves per episode: 19.69\n",
"1510000 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -33.91 - Last mean moves per episode: 19.74\n",
"1519995 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -34.15 - Last mean moves per episode: 19.77\n",
"1529998 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -34.16 - Last mean moves per episode: 19.78\n",
"1539988 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -34.57 - Last mean moves per episode: 19.83\n",
"1549976 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -34.84 - Last mean moves per episode: 19.89\n",
"1559999 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -34.65 - Last mean moves per episode: 19.87\n",
"1569981 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -35.64 - Last mean moves per episode: 19.95\n",
"1579991 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -36.81 - Last mean moves per episode: 20.03\n",
"1589995 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -37.79 - Last mean moves per episode: 20.10\n",
"1599999 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -38.56 - Last mean moves per episode: 20.17\n",
"1609979 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -39.42 - Last mean moves per episode: 20.26\n",
"1619998 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -39.83 - Last mean moves per episode: 20.33\n",
"1629971 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -40.46 - Last mean moves per episode: 20.37\n",
"1639986 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -40.37 - Last mean moves per episode: 20.39\n",
"1649987 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -40.76 - Last mean moves per episode: 20.40\n",
"1659998 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -40.50 - Last mean moves per episode: 20.38\n",
"1669998 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -40.82 - Last mean moves per episode: 20.41\n",
"1679993 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -40.01 - Last mean moves per episode: 20.33\n",
"1689999 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -39.16 - Last mean moves per episode: 20.25\n",
"1699991 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -38.91 - Last mean moves per episode: 20.21\n",
"1709998 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -38.43 - Last mean moves per episode: 20.16\n",
"1719990 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -37.10 - Last mean moves per episode: 20.05\n",
"1729987 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -36.15 - Last mean moves per episode: 19.96\n",
"1739994 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -35.21 - Last mean moves per episode: 19.89\n",
"1749994 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -33.31 - Last mean moves per episode: 19.71\n",
"1759999 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -32.08 - Last mean moves per episode: 19.61\n",
"1769997 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -30.44 - Last mean moves per episode: 19.48\n",
"1779996 timesteps\n",
"Best mean reward: -29.27 - Last mean reward per episode: -28.92 - Last mean moves per episode: 19.34\n",
"Saving new best model\n",
"1789984 timesteps\n",
"Best mean reward: -28.92 - Last mean reward per episode: -27.63 - Last mean moves per episode: 19.22\n",
"Saving new best model\n",
"1799983 timesteps\n",
"Best mean reward: -27.63 - Last mean reward per episode: -25.92 - Last mean moves per episode: 19.03\n",
"Saving new best model\n",
"1809980 timesteps\n",
"Best mean reward: -25.92 - Last mean reward per episode: -24.89 - Last mean moves per episode: 18.89\n",
"Saving new best model\n",
"1819995 timesteps\n",
"Best mean reward: -24.89 - Last mean reward per episode: -23.37 - Last mean moves per episode: 18.66\n",
"Saving new best model\n",
"1830000 timesteps\n",
"Best mean reward: -23.37 - Last mean reward per episode: -22.23 - Last mean moves per episode: 18.47\n",
"Saving new best model\n",
"1840000 timesteps\n",
"Best mean reward: -22.23 - Last mean reward per episode: -21.40 - Last mean moves per episode: 18.34\n",
"Saving new best model\n",
"1849996 timesteps\n",
"Best mean reward: -21.40 - Last mean reward per episode: -20.54 - Last mean moves per episode: 18.21\n",
"Saving new best model\n",
"1860000 timesteps\n",
"Best mean reward: -20.54 - Last mean reward per episode: -19.86 - Last mean moves per episode: 18.11\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"1869995 timesteps\n",
"Best mean reward: -19.86 - Last mean reward per episode: -19.34 - Last mean moves per episode: 18.01\n",
"Saving new best model\n",
"1879999 timesteps\n",
"Best mean reward: -19.34 - Last mean reward per episode: -18.20 - Last mean moves per episode: 17.86\n",
"Saving new best model\n",
"1889993 timesteps\n",
"Best mean reward: -18.20 - Last mean reward per episode: -17.04 - Last mean moves per episode: 17.72\n",
"Saving new best model\n",
"1899991 timesteps\n",
"Best mean reward: -17.04 - Last mean reward per episode: -16.06 - Last mean moves per episode: 17.56\n",
"Saving new best model\n",
"1909985 timesteps\n",
"Best mean reward: -16.06 - Last mean reward per episode: -14.49 - Last mean moves per episode: 17.35\n",
"Saving new best model\n",
"1919994 timesteps\n",
"Best mean reward: -14.49 - Last mean reward per episode: -14.02 - Last mean moves per episode: 17.24\n",
"Saving new best model\n",
"1929979 timesteps\n",
"Best mean reward: -14.02 - Last mean reward per episode: -13.38 - Last mean moves per episode: 17.13\n",
"Saving new best model\n",
"1939971 timesteps\n",
"Best mean reward: -13.38 - Last mean reward per episode: -12.44 - Last mean moves per episode: 16.95\n",
"Saving new best model\n",
"1949999 timesteps\n",
"Best mean reward: -12.44 - Last mean reward per episode: -11.17 - Last mean moves per episode: 16.77\n",
"Saving new best model\n",
"1959985 timesteps\n",
"Best mean reward: -11.17 - Last mean reward per episode: -10.49 - Last mean moves per episode: 16.65\n",
"Saving new best model\n",
"1969997 timesteps\n",
"Best mean reward: -10.49 - Last mean reward per episode: -9.61 - Last mean moves per episode: 16.54\n",
"Saving new best model\n",
"1979997 timesteps\n",
"Best mean reward: -9.61 - Last mean reward per episode: -9.30 - Last mean moves per episode: 16.51\n",
"Saving new best model\n",
"1989986 timesteps\n",
"Best mean reward: -9.30 - Last mean reward per episode: -8.21 - Last mean moves per episode: 16.35\n",
"Saving new best model\n",
"1999996 timesteps\n",
"Best mean reward: -8.21 - Last mean reward per episode: -7.90 - Last mean moves per episode: 16.27\n",
"Saving new best model\n",
"2009980 timesteps\n",
"Best mean reward: -7.90 - Last mean reward per episode: -6.57 - Last mean moves per episode: 16.05\n",
"Saving new best model\n",
"2019987 timesteps\n",
"Best mean reward: -6.57 - Last mean reward per episode: -5.78 - Last mean moves per episode: 15.90\n",
"Saving new best model\n",
"2029982 timesteps\n",
"Best mean reward: -5.78 - Last mean reward per episode: -4.32 - Last mean moves per episode: 15.69\n",
"Saving new best model\n",
"2039992 timesteps\n",
"Best mean reward: -4.32 - Last mean reward per episode: -3.52 - Last mean moves per episode: 15.52\n",
"Saving new best model\n",
"2049988 timesteps\n",
"Best mean reward: -3.52 - Last mean reward per episode: -2.98 - Last mean moves per episode: 15.40\n",
"Saving new best model\n",
"2059989 timesteps\n",
"Best mean reward: -2.98 - Last mean reward per episode: -2.59 - Last mean moves per episode: 15.27\n",
"Saving new best model\n",
"2069993 timesteps\n",
"Best mean reward: -2.59 - Last mean reward per episode: -2.01 - Last mean moves per episode: 15.10\n",
"Saving new best model\n",
"2079979 timesteps\n",
"Best mean reward: -2.01 - Last mean reward per episode: -1.47 - Last mean moves per episode: 14.93\n",
"Saving new best model\n",
"2089998 timesteps\n",
"Best mean reward: -1.47 - Last mean reward per episode: -1.01 - Last mean moves per episode: 14.76\n",
"Saving new best model\n",
"2099992 timesteps\n",
"Best mean reward: -1.01 - Last mean reward per episode: -0.68 - Last mean moves per episode: 14.63\n",
"Saving new best model\n",
"2109982 timesteps\n",
"Best mean reward: -0.68 - Last mean reward per episode: -0.37 - Last mean moves per episode: 14.51\n",
"Saving new best model\n",
"2119995 timesteps\n",
"Best mean reward: -0.37 - Last mean reward per episode: -0.07 - Last mean moves per episode: 14.38\n",
"Saving new best model\n",
"2129988 timesteps\n",
"Best mean reward: -0.07 - Last mean reward per episode: 0.44 - Last mean moves per episode: 14.26\n",
"Saving new best model\n",
"2139996 timesteps\n",
"Best mean reward: 0.44 - Last mean reward per episode: 1.26 - Last mean moves per episode: 14.15\n",
"Saving new best model\n",
"2150000 timesteps\n",
"Best mean reward: 1.26 - Last mean reward per episode: 1.45 - Last mean moves per episode: 14.10\n",
"Saving new best model\n",
"2159984 timesteps\n",
"Best mean reward: 1.45 - Last mean reward per episode: 1.47 - Last mean moves per episode: 14.08\n",
"Saving new best model\n",
"2170000 timesteps\n",
"Best mean reward: 1.47 - Last mean reward per episode: 1.38 - Last mean moves per episode: 14.06\n",
"2179977 timesteps\n",
"Best mean reward: 1.47 - Last mean reward per episode: 1.48 - Last mean moves per episode: 14.00\n",
"Saving new best model\n",
"2189995 timesteps\n",
"Best mean reward: 1.48 - Last mean reward per episode: 1.72 - Last mean moves per episode: 13.96\n",
"Saving new best model\n",
"2199993 timesteps\n",
"Best mean reward: 1.72 - Last mean reward per episode: 1.99 - Last mean moves per episode: 13.92\n",
"Saving new best model\n",
"2209992 timesteps\n",
"Best mean reward: 1.99 - Last mean reward per episode: 1.82 - Last mean moves per episode: 13.95\n",
"2219996 timesteps\n",
"Best mean reward: 1.99 - Last mean reward per episode: 1.42 - Last mean moves per episode: 14.01\n",
"2229995 timesteps\n",
"Best mean reward: 1.99 - Last mean reward per episode: 1.76 - Last mean moves per episode: 14.02\n",
"2239998 timesteps\n",
"Best mean reward: 1.99 - Last mean reward per episode: 2.69 - Last mean moves per episode: 13.93\n",
"Saving new best model\n",
"2249986 timesteps\n",
"Best mean reward: 2.69 - Last mean reward per episode: 3.67 - Last mean moves per episode: 13.84\n",
"Saving new best model\n",
"2259988 timesteps\n",
"Best mean reward: 3.67 - Last mean reward per episode: 4.12 - Last mean moves per episode: 13.79\n",
"Saving new best model\n",
"2269979 timesteps\n",
"Best mean reward: 4.12 - Last mean reward per episode: 4.69 - Last mean moves per episode: 13.70\n",
"Saving new best model\n",
"2279996 timesteps\n",
"Best mean reward: 4.69 - Last mean reward per episode: 5.13 - Last mean moves per episode: 13.63\n",
"Saving new best model\n",
"2289997 timesteps\n",
"Best mean reward: 5.13 - Last mean reward per episode: 5.71 - Last mean moves per episode: 13.55\n",
"Saving new best model\n",
"2299984 timesteps\n",
"Best mean reward: 5.71 - Last mean reward per episode: 6.00 - Last mean moves per episode: 13.45\n",
"Saving new best model\n",
"2309992 timesteps\n",
"Best mean reward: 6.00 - Last mean reward per episode: 6.21 - Last mean moves per episode: 13.40\n",
"Saving new best model\n",
"2319985 timesteps\n",
"Best mean reward: 6.21 - Last mean reward per episode: 6.56 - Last mean moves per episode: 13.30\n",
"Saving new best model\n",
"2329985 timesteps\n",
"Best mean reward: 6.56 - Last mean reward per episode: 6.69 - Last mean moves per episode: 13.22\n",
"Saving new best model\n",
"2339975 timesteps\n",
"Best mean reward: 6.69 - Last mean reward per episode: 7.55 - Last mean moves per episode: 13.08\n",
"Saving new best model\n",
"2349985 timesteps\n",
"Best mean reward: 7.55 - Last mean reward per episode: 8.52 - Last mean moves per episode: 12.92\n",
"Saving new best model\n",
"2359995 timesteps\n",
"Best mean reward: 8.52 - Last mean reward per episode: 8.72 - Last mean moves per episode: 12.84\n",
"Saving new best model\n",
"2369999 timesteps\n",
"Best mean reward: 8.72 - Last mean reward per episode: 8.69 - Last mean moves per episode: 12.80\n",
"2379994 timesteps\n",
"Best mean reward: 8.72 - Last mean reward per episode: 8.91 - Last mean moves per episode: 12.70\n",
"Saving new best model\n",
"2389978 timesteps\n",
"Best mean reward: 8.91 - Last mean reward per episode: 9.01 - Last mean moves per episode: 12.64\n",
"Saving new best model\n",
"2399994 timesteps\n",
"Best mean reward: 9.01 - Last mean reward per episode: 9.63 - Last mean moves per episode: 12.52\n",
"Saving new best model\n",
"2409998 timesteps\n",
"Best mean reward: 9.63 - Last mean reward per episode: 9.99 - Last mean moves per episode: 12.47\n",
"Saving new best model\n",
"2419991 timesteps\n",
"Best mean reward: 9.99 - Last mean reward per episode: 10.21 - Last mean moves per episode: 12.46\n",
"Saving new best model\n",
"2429981 timesteps\n",
"Best mean reward: 10.21 - Last mean reward per episode: 10.92 - Last mean moves per episode: 12.40\n",
"Saving new best model\n",
"2439998 timesteps\n",
"Best mean reward: 10.92 - Last mean reward per episode: 11.50 - Last mean moves per episode: 12.37\n",
"Saving new best model\n",
"2449995 timesteps\n",
"Best mean reward: 11.50 - Last mean reward per episode: 11.91 - Last mean moves per episode: 12.31\n",
"Saving new best model\n",
"2459992 timesteps\n",
"Best mean reward: 11.91 - Last mean reward per episode: 12.38 - Last mean moves per episode: 12.29\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"2469994 timesteps\n",
"Best mean reward: 12.38 - Last mean reward per episode: 12.32 - Last mean moves per episode: 12.30\n",
"2479998 timesteps\n",
"Best mean reward: 12.38 - Last mean reward per episode: 12.39 - Last mean moves per episode: 12.31\n",
"Saving new best model\n",
"2490000 timesteps\n",
"Best mean reward: 12.39 - Last mean reward per episode: 12.48 - Last mean moves per episode: 12.31\n",
"Saving new best model\n",
"2499978 timesteps\n",
"Best mean reward: 12.48 - Last mean reward per episode: 12.70 - Last mean moves per episode: 12.35\n",
"Saving new best model\n",
"2509989 timesteps\n",
"Best mean reward: 12.70 - Last mean reward per episode: 13.20 - Last mean moves per episode: 12.28\n",
"Saving new best model\n",
"2520000 timesteps\n",
"Best mean reward: 13.20 - Last mean reward per episode: 13.05 - Last mean moves per episode: 12.31\n",
"2529983 timesteps\n",
"Best mean reward: 13.20 - Last mean reward per episode: 13.61 - Last mean moves per episode: 12.23\n",
"Saving new best model\n",
"2539996 timesteps\n",
"Best mean reward: 13.61 - Last mean reward per episode: 13.64 - Last mean moves per episode: 12.21\n",
"Saving new best model\n",
"2549976 timesteps\n",
"Best mean reward: 13.64 - Last mean reward per episode: 12.57 - Last mean moves per episode: 12.33\n",
"2560000 timesteps\n",
"Best mean reward: 13.64 - Last mean reward per episode: 12.93 - Last mean moves per episode: 12.26\n",
"2569992 timesteps\n",
"Best mean reward: 13.64 - Last mean reward per episode: 13.05 - Last mean moves per episode: 12.24\n",
"2579999 timesteps\n",
"Best mean reward: 13.64 - Last mean reward per episode: 12.87 - Last mean moves per episode: 12.25\n",
"2589999 timesteps\n",
"Best mean reward: 13.64 - Last mean reward per episode: 13.28 - Last mean moves per episode: 12.15\n",
"2599974 timesteps\n",
"Best mean reward: 13.64 - Last mean reward per episode: 13.88 - Last mean moves per episode: 12.08\n",
"Saving new best model\n",
"2609971 timesteps\n",
"Best mean reward: 13.88 - Last mean reward per episode: 14.43 - Last mean moves per episode: 11.98\n",
"Saving new best model\n",
"2619998 timesteps\n",
"Best mean reward: 14.43 - Last mean reward per episode: 14.44 - Last mean moves per episode: 11.96\n",
"Saving new best model\n",
"2629995 timesteps\n",
"Best mean reward: 14.44 - Last mean reward per episode: 14.69 - Last mean moves per episode: 11.93\n",
"Saving new best model\n",
"2639994 timesteps\n",
"Best mean reward: 14.69 - Last mean reward per episode: 15.04 - Last mean moves per episode: 11.89\n",
"Saving new best model\n",
"2649998 timesteps\n",
"Best mean reward: 15.04 - Last mean reward per episode: 15.16 - Last mean moves per episode: 11.88\n",
"Saving new best model\n",
"2659995 timesteps\n",
"Best mean reward: 15.16 - Last mean reward per episode: 15.90 - Last mean moves per episode: 11.80\n",
"Saving new best model\n",
"2669994 timesteps\n",
"Best mean reward: 15.90 - Last mean reward per episode: 16.74 - Last mean moves per episode: 11.70\n",
"Saving new best model\n",
"2679989 timesteps\n",
"Best mean reward: 16.74 - Last mean reward per episode: 17.04 - Last mean moves per episode: 11.66\n",
"Saving new best model\n",
"2689970 timesteps\n",
"Best mean reward: 17.04 - Last mean reward per episode: 17.34 - Last mean moves per episode: 11.66\n",
"Saving new best model\n",
"2699991 timesteps\n",
"Best mean reward: 17.34 - Last mean reward per episode: 17.62 - Last mean moves per episode: 11.62\n",
"Saving new best model\n",
"2709999 timesteps\n",
"Best mean reward: 17.62 - Last mean reward per episode: 17.62 - Last mean moves per episode: 11.63\n",
"2719990 timesteps\n",
"Best mean reward: 17.62 - Last mean reward per episode: 17.97 - Last mean moves per episode: 11.59\n",
"Saving new best model\n",
"2729993 timesteps\n",
"Best mean reward: 17.97 - Last mean reward per episode: 18.29 - Last mean moves per episode: 11.54\n",
"Saving new best model\n",
"2739982 timesteps\n",
"Best mean reward: 18.29 - Last mean reward per episode: 18.29 - Last mean moves per episode: 11.54\n",
"Saving new best model\n",
"2749996 timesteps\n",
"Best mean reward: 18.29 - Last mean reward per episode: 18.09 - Last mean moves per episode: 11.55\n",
"2759999 timesteps\n",
"Best mean reward: 18.29 - Last mean reward per episode: 18.44 - Last mean moves per episode: 11.47\n",
"Saving new best model\n",
"2769969 timesteps\n",
"Best mean reward: 18.44 - Last mean reward per episode: 18.48 - Last mean moves per episode: 11.43\n",
"Saving new best model\n",
"2779987 timesteps\n",
"Best mean reward: 18.48 - Last mean reward per episode: 18.50 - Last mean moves per episode: 11.42\n",
"Saving new best model\n",
"2789997 timesteps\n",
"Best mean reward: 18.50 - Last mean reward per episode: 18.78 - Last mean moves per episode: 11.37\n",
"Saving new best model\n",
"2799976 timesteps\n",
"Best mean reward: 18.78 - Last mean reward per episode: 18.78 - Last mean moves per episode: 11.35\n",
"Saving new best model\n",
"2809999 timesteps\n",
"Best mean reward: 18.78 - Last mean reward per episode: 18.76 - Last mean moves per episode: 11.35\n",
"2819994 timesteps\n",
"Best mean reward: 18.78 - Last mean reward per episode: 19.16 - Last mean moves per episode: 11.32\n",
"Saving new best model\n",
"2829993 timesteps\n",
"Best mean reward: 19.16 - Last mean reward per episode: 19.29 - Last mean moves per episode: 11.29\n",
"Saving new best model\n",
"2839988 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.80 - Last mean moves per episode: 11.32\n",
"2849998 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 19.01 - Last mean moves per episode: 11.28\n",
"2859993 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 19.07 - Last mean moves per episode: 11.28\n",
"2869997 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 19.21 - Last mean moves per episode: 11.26\n",
"2879984 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.66 - Last mean moves per episode: 11.33\n",
"2889971 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.86 - Last mean moves per episode: 11.29\n",
"2899996 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.91 - Last mean moves per episode: 11.27\n",
"2909981 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.69 - Last mean moves per episode: 11.28\n",
"2919990 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.91 - Last mean moves per episode: 11.23\n",
"2929992 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.77 - Last mean moves per episode: 11.20\n",
"2939998 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.39 - Last mean moves per episode: 11.24\n",
"2949999 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.61 - Last mean moves per episode: 11.21\n",
"2959993 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.26 - Last mean moves per episode: 11.21\n",
"2969977 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.24 - Last mean moves per episode: 11.18\n",
"2979987 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 17.96 - Last mean moves per episode: 11.19\n",
"2989996 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.28 - Last mean moves per episode: 11.15\n",
"2999997 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.28 - Last mean moves per episode: 11.15\n",
"3009988 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.29 - Last mean moves per episode: 11.14\n",
"3019996 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.36 - Last mean moves per episode: 11.12\n",
"3029978 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 17.92 - Last mean moves per episode: 11.17\n",
"3039998 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 17.49 - Last mean moves per episode: 11.23\n",
"3049997 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 17.82 - Last mean moves per episode: 11.18\n",
"3059999 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 17.92 - Last mean moves per episode: 11.16\n",
"3069997 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.61 - Last mean moves per episode: 11.09\n",
"3079992 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 18.66 - Last mean moves per episode: 11.08\n",
"3089987 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 19.25 - Last mean moves per episode: 11.00\n",
"3099999 timesteps\n",
"Best mean reward: 19.29 - Last mean reward per episode: 19.62 - Last mean moves per episode: 10.93\n",
"Saving new best model\n",
"3109996 timesteps\n",
"Best mean reward: 19.62 - Last mean reward per episode: 20.10 - Last mean moves per episode: 10.86\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"3119975 timesteps\n",
"Best mean reward: 20.10 - Last mean reward per episode: 20.25 - Last mean moves per episode: 10.85\n",
"Saving new best model\n",
"3129971 timesteps\n",
"Best mean reward: 20.25 - Last mean reward per episode: 20.36 - Last mean moves per episode: 10.83\n",
"Saving new best model\n",
"3139984 timesteps\n",
"Best mean reward: 20.36 - Last mean reward per episode: 20.90 - Last mean moves per episode: 10.78\n",
"Saving new best model\n",
"3149999 timesteps\n",
"Best mean reward: 20.90 - Last mean reward per episode: 21.19 - Last mean moves per episode: 10.77\n",
"Saving new best model\n",
"3159997 timesteps\n",
"Best mean reward: 21.19 - Last mean reward per episode: 21.32 - Last mean moves per episode: 10.76\n",
"Saving new best model\n",
"3169999 timesteps\n",
"Best mean reward: 21.32 - Last mean reward per episode: 21.84 - Last mean moves per episode: 10.70\n",
"Saving new best model\n",
"3179996 timesteps\n",
"Best mean reward: 21.84 - Last mean reward per episode: 21.98 - Last mean moves per episode: 10.70\n",
"Saving new best model\n",
"3189977 timesteps\n",
"Best mean reward: 21.98 - Last mean reward per episode: 22.36 - Last mean moves per episode: 10.68\n",
"Saving new best model\n",
"3199994 timesteps\n",
"Best mean reward: 22.36 - Last mean reward per episode: 22.71 - Last mean moves per episode: 10.65\n",
"Saving new best model\n",
"3209992 timesteps\n",
"Best mean reward: 22.71 - Last mean reward per episode: 22.71 - Last mean moves per episode: 10.68\n",
"3220000 timesteps\n",
"Best mean reward: 22.71 - Last mean reward per episode: 23.15 - Last mean moves per episode: 10.61\n",
"Saving new best model\n",
"3229990 timesteps\n",
"Best mean reward: 23.15 - Last mean reward per episode: 23.27 - Last mean moves per episode: 10.58\n",
"Saving new best model\n",
"3239997 timesteps\n",
"Best mean reward: 23.27 - Last mean reward per episode: 23.76 - Last mean moves per episode: 10.53\n",
"Saving new best model\n",
"3249998 timesteps\n",
"Best mean reward: 23.76 - Last mean reward per episode: 24.06 - Last mean moves per episode: 10.50\n",
"Saving new best model\n",
"3259993 timesteps\n",
"Best mean reward: 24.06 - Last mean reward per episode: 24.15 - Last mean moves per episode: 10.48\n",
"Saving new best model\n",
"3269988 timesteps\n",
"Best mean reward: 24.15 - Last mean reward per episode: 24.21 - Last mean moves per episode: 10.48\n",
"Saving new best model\n",
"3279995 timesteps\n",
"Best mean reward: 24.21 - Last mean reward per episode: 24.39 - Last mean moves per episode: 10.48\n",
"Saving new best model\n",
"3289993 timesteps\n",
"Best mean reward: 24.39 - Last mean reward per episode: 24.05 - Last mean moves per episode: 10.53\n",
"3299996 timesteps\n",
"Best mean reward: 24.39 - Last mean reward per episode: 24.61 - Last mean moves per episode: 10.47\n",
"Saving new best model\n",
"3310000 timesteps\n",
"Best mean reward: 24.61 - Last mean reward per episode: 24.47 - Last mean moves per episode: 10.51\n",
"3319992 timesteps\n",
"Best mean reward: 24.61 - Last mean reward per episode: 24.43 - Last mean moves per episode: 10.54\n",
"3329970 timesteps\n",
"Best mean reward: 24.61 - Last mean reward per episode: 24.23 - Last mean moves per episode: 10.57\n",
"3339997 timesteps\n",
"Best mean reward: 24.61 - Last mean reward per episode: 24.16 - Last mean moves per episode: 10.59\n",
"3349996 timesteps\n",
"Best mean reward: 24.61 - Last mean reward per episode: 24.40 - Last mean moves per episode: 10.56\n",
"3359987 timesteps\n",
"Best mean reward: 24.61 - Last mean reward per episode: 24.47 - Last mean moves per episode: 10.57\n",
"3369971 timesteps\n",
"Best mean reward: 24.61 - Last mean reward per episode: 24.73 - Last mean moves per episode: 10.55\n",
"Saving new best model\n",
"3379995 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.51 - Last mean moves per episode: 10.58\n",
"3389997 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.64 - Last mean moves per episode: 10.57\n",
"3399996 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.70 - Last mean moves per episode: 10.59\n",
"3410000 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.30 - Last mean moves per episode: 10.66\n",
"3419991 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.29 - Last mean moves per episode: 10.68\n",
"3429998 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.41 - Last mean moves per episode: 10.69\n",
"3439994 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.27 - Last mean moves per episode: 10.71\n",
"3449974 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.25 - Last mean moves per episode: 10.75\n",
"3459993 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.21 - Last mean moves per episode: 10.76\n",
"3470000 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 23.94 - Last mean moves per episode: 10.83\n",
"3479991 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 23.89 - Last mean moves per episode: 10.87\n",
"3489995 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 23.81 - Last mean moves per episode: 10.89\n",
"3499992 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 23.70 - Last mean moves per episode: 10.91\n",
"3509998 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 23.79 - Last mean moves per episode: 10.90\n",
"3519989 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 23.97 - Last mean moves per episode: 10.88\n",
"3529998 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.12 - Last mean moves per episode: 10.86\n",
"3539990 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.19 - Last mean moves per episode: 10.88\n",
"3549991 timesteps\n",
"Best mean reward: 24.73 - Last mean reward per episode: 24.86 - Last mean moves per episode: 10.81\n",
"Saving new best model\n",
"3559992 timesteps\n",
"Best mean reward: 24.86 - Last mean reward per episode: 25.08 - Last mean moves per episode: 10.82\n",
"Saving new best model\n",
"3569993 timesteps\n",
"Best mean reward: 25.08 - Last mean reward per episode: 24.92 - Last mean moves per episode: 10.85\n",
"3579983 timesteps\n",
"Best mean reward: 25.08 - Last mean reward per episode: 25.41 - Last mean moves per episode: 10.78\n",
"Saving new best model\n",
"3589990 timesteps\n",
"Best mean reward: 25.41 - Last mean reward per episode: 25.58 - Last mean moves per episode: 10.76\n",
"Saving new best model\n",
"3599996 timesteps\n",
"Best mean reward: 25.58 - Last mean reward per episode: 25.46 - Last mean moves per episode: 10.81\n",
"3609977 timesteps\n",
"Best mean reward: 25.58 - Last mean reward per episode: 25.47 - Last mean moves per episode: 10.79\n",
"3619987 timesteps\n",
"Best mean reward: 25.58 - Last mean reward per episode: 25.44 - Last mean moves per episode: 10.79\n",
"3629992 timesteps\n",
"Best mean reward: 25.58 - Last mean reward per episode: 25.53 - Last mean moves per episode: 10.77\n",
"3639997 timesteps\n",
"Best mean reward: 25.58 - Last mean reward per episode: 25.77 - Last mean moves per episode: 10.74\n",
"Saving new best model\n",
"3649998 timesteps\n",
"Best mean reward: 25.77 - Last mean reward per episode: 25.80 - Last mean moves per episode: 10.75\n",
"Saving new best model\n",
"3659971 timesteps\n",
"Best mean reward: 25.80 - Last mean reward per episode: 25.91 - Last mean moves per episode: 10.74\n",
"Saving new best model\n",
"3669987 timesteps\n",
"Best mean reward: 25.91 - Last mean reward per episode: 26.14 - Last mean moves per episode: 10.69\n",
"Saving new best model\n",
"3679988 timesteps\n",
"Best mean reward: 26.14 - Last mean reward per episode: 26.37 - Last mean moves per episode: 10.66\n",
"Saving new best model\n",
"3689977 timesteps\n",
"Best mean reward: 26.37 - Last mean reward per episode: 26.46 - Last mean moves per episode: 10.65\n",
"Saving new best model\n",
"3699988 timesteps\n",
"Best mean reward: 26.46 - Last mean reward per episode: 27.09 - Last mean moves per episode: 10.55\n",
"Saving new best model\n",
"3709983 timesteps\n",
"Best mean reward: 27.09 - Last mean reward per episode: 27.57 - Last mean moves per episode: 10.49\n",
"Saving new best model\n",
"3719998 timesteps\n",
"Best mean reward: 27.57 - Last mean reward per episode: 27.94 - Last mean moves per episode: 10.42\n",
"Saving new best model\n",
"3729997 timesteps\n",
"Best mean reward: 27.94 - Last mean reward per episode: 28.14 - Last mean moves per episode: 10.42\n",
"Saving new best model\n",
"3739996 timesteps\n",
"Best mean reward: 28.14 - Last mean reward per episode: 28.10 - Last mean moves per episode: 10.45\n",
"3749991 timesteps\n",
"Best mean reward: 28.14 - Last mean reward per episode: 28.38 - Last mean moves per episode: 10.41\n",
"Saving new best model\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"3759989 timesteps\n",
"Best mean reward: 28.38 - Last mean reward per episode: 28.57 - Last mean moves per episode: 10.36\n",
"Saving new best model\n",
"3769991 timesteps\n",
"Best mean reward: 28.57 - Last mean reward per episode: 28.73 - Last mean moves per episode: 10.33\n",
"Saving new best model\n",
"3779991 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.68 - Last mean moves per episode: 10.33\n",
"3789994 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.73 - Last mean moves per episode: 10.30\n",
"Saving new best model\n",
"3800000 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.51 - Last mean moves per episode: 10.31\n",
"3810000 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.55 - Last mean moves per episode: 10.33\n",
"3819994 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.64 - Last mean moves per episode: 10.32\n",
"3829996 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.57 - Last mean moves per episode: 10.33\n",
"3839996 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.24 - Last mean moves per episode: 10.37\n",
"3850000 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.21 - Last mean moves per episode: 10.38\n",
"3859999 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.06 - Last mean moves per episode: 10.41\n",
"3869978 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.39 - Last mean moves per episode: 10.38\n",
"3880000 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 27.91 - Last mean moves per episode: 10.45\n",
"3889971 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 27.86 - Last mean moves per episode: 10.46\n",
"3899989 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.35 - Last mean moves per episode: 10.41\n",
"3909978 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.34 - Last mean moves per episode: 10.44\n",
"3919995 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.46 - Last mean moves per episode: 10.41\n",
"3929991 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.38 - Last mean moves per episode: 10.46\n",
"3939998 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.72 - Last mean moves per episode: 10.42\n",
"3949999 timesteps\n",
"Best mean reward: 28.73 - Last mean reward per episode: 28.85 - Last mean moves per episode: 10.39\n",
"Saving new best model\n",
"3959996 timesteps\n",
"Best mean reward: 28.85 - Last mean reward per episode: 29.03 - Last mean moves per episode: 10.36\n",
"Saving new best model\n",
"3969994 timesteps\n",
"Best mean reward: 29.03 - Last mean reward per episode: 28.97 - Last mean moves per episode: 10.39\n",
"3980000 timesteps\n",
"Best mean reward: 29.03 - Last mean reward per episode: 29.20 - Last mean moves per episode: 10.36\n",
"Saving new best model\n",
"3989994 timesteps\n",
"Best mean reward: 29.20 - Last mean reward per episode: 29.81 - Last mean moves per episode: 10.32\n",
"Saving new best model\n",
"3999984 timesteps\n",
"Best mean reward: 29.81 - Last mean reward per episode: 29.79 - Last mean moves per episode: 10.31\n",
"4009998 timesteps\n",
"Best mean reward: 29.81 - Last mean reward per episode: 29.85 - Last mean moves per episode: 10.27\n",
"Saving new best model\n",
"4020000 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.58 - Last mean moves per episode: 10.30\n",
"4029993 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.55 - Last mean moves per episode: 10.28\n",
"4039996 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.67 - Last mean moves per episode: 10.23\n",
"4049995 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.61 - Last mean moves per episode: 10.23\n",
"4059997 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.64 - Last mean moves per episode: 10.24\n",
"4069992 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.69 - Last mean moves per episode: 10.22\n",
"4079999 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.62 - Last mean moves per episode: 10.23\n",
"4089992 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.67 - Last mean moves per episode: 10.22\n",
"4099995 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.61 - Last mean moves per episode: 10.22\n",
"4109996 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 29.80 - Last mean moves per episode: 10.22\n",
"4119992 timesteps\n",
"Best mean reward: 29.85 - Last mean reward per episode: 30.13 - Last mean moves per episode: 10.17\n",
"Saving new best model\n",
"4129999 timesteps\n",
"Best mean reward: 30.13 - Last mean reward per episode: 30.36 - Last mean moves per episode: 10.12\n",
"Saving new best model\n",
"4140000 timesteps\n",
"Best mean reward: 30.36 - Last mean reward per episode: 30.47 - Last mean moves per episode: 10.14\n",
"Saving new best model\n",
"4150000 timesteps\n",
"Best mean reward: 30.47 - Last mean reward per episode: 30.65 - Last mean moves per episode: 10.14\n",
"Saving new best model\n",
"4159995 timesteps\n",
"Best mean reward: 30.65 - Last mean reward per episode: 30.93 - Last mean moves per episode: 10.11\n",
"Saving new best model\n",
"4169998 timesteps\n",
"Best mean reward: 30.93 - Last mean reward per episode: 30.97 - Last mean moves per episode: 10.13\n",
"Saving new best model\n",
"4179996 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.80 - Last mean moves per episode: 10.13\n",
"4190000 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.75 - Last mean moves per episode: 10.14\n",
"4200000 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.76 - Last mean moves per episode: 10.15\n",
"4209999 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.83 - Last mean moves per episode: 10.13\n",
"4219995 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.83 - Last mean moves per episode: 10.15\n",
"4229996 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.50 - Last mean moves per episode: 10.23\n",
"4239990 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.47 - Last mean moves per episode: 10.21\n",
"4249998 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.34 - Last mean moves per episode: 10.23\n",
"4259997 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.31 - Last mean moves per episode: 10.22\n",
"4269995 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.39 - Last mean moves per episode: 10.20\n",
"4279995 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.55 - Last mean moves per episode: 10.19\n",
"4290000 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.53 - Last mean moves per episode: 10.20\n",
"4299997 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.45 - Last mean moves per episode: 10.21\n",
"4309998 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.38 - Last mean moves per episode: 10.22\n",
"4320000 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.31 - Last mean moves per episode: 10.23\n",
"4329990 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.05 - Last mean moves per episode: 10.26\n",
"4339999 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 29.25 - Last mean moves per episode: 10.34\n",
"4349997 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 29.33 - Last mean moves per episode: 10.32\n",
"4359999 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 28.74 - Last mean moves per episode: 10.39\n",
"4369997 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 27.68 - Last mean moves per episode: 10.50\n",
"4379997 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 26.90 - Last mean moves per episode: 10.58\n",
"4389990 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 26.87 - Last mean moves per episode: 10.56\n",
"4399999 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 26.96 - Last mean moves per episode: 10.59\n",
"4409999 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 26.97 - Last mean moves per episode: 10.60\n",
"4420000 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 27.06 - Last mean moves per episode: 10.57\n",
"4429985 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 27.04 - Last mean moves per episode: 10.57\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"4440000 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 28.00 - Last mean moves per episode: 10.45\n",
"4449977 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 28.35 - Last mean moves per episode: 10.43\n",
"4459996 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 28.68 - Last mean moves per episode: 10.37\n",
"4469996 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 29.62 - Last mean moves per episode: 10.29\n",
"4479988 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.64 - Last mean moves per episode: 10.17\n",
"4490000 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 30.90 - Last mean moves per episode: 10.17\n",
"4499999 timesteps\n",
"Best mean reward: 30.97 - Last mean reward per episode: 31.12 - Last mean moves per episode: 10.11\n",
"Saving new best model\n",
"4509992 timesteps\n",
"Best mean reward: 31.12 - Last mean reward per episode: 31.14 - Last mean moves per episode: 10.09\n",
"Saving new best model\n",
"4519999 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 30.97 - Last mean moves per episode: 10.13\n",
"4529997 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 31.06 - Last mean moves per episode: 10.11\n",
"4540000 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 30.81 - Last mean moves per episode: 10.17\n",
"4549996 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 30.75 - Last mean moves per episode: 10.19\n",
"4559994 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 31.05 - Last mean moves per episode: 10.15\n",
"4570000 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 30.41 - Last mean moves per episode: 10.23\n",
"4579993 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 29.68 - Last mean moves per episode: 10.33\n",
"4589998 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 29.35 - Last mean moves per episode: 10.37\n",
"4600000 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 29.21 - Last mean moves per episode: 10.35\n",
"4609989 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 28.30 - Last mean moves per episode: 10.48\n",
"4619992 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 28.46 - Last mean moves per episode: 10.46\n",
"4629994 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 28.20 - Last mean moves per episode: 10.50\n",
"4639994 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 28.12 - Last mean moves per episode: 10.52\n",
"4649998 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 27.95 - Last mean moves per episode: 10.54\n",
"4659978 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 28.02 - Last mean moves per episode: 10.54\n",
"4669991 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 27.99 - Last mean moves per episode: 10.55\n",
"4679994 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 29.12 - Last mean moves per episode: 10.41\n",
"4689992 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 29.50 - Last mean moves per episode: 10.34\n",
"4699997 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 29.73 - Last mean moves per episode: 10.34\n",
"4709989 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 30.91 - Last mean moves per episode: 10.16\n",
"4719999 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 30.93 - Last mean moves per episode: 10.19\n",
"4730000 timesteps\n",
"Best mean reward: 31.14 - Last mean reward per episode: 31.48 - Last mean moves per episode: 10.12\n",
"Saving new best model\n",
"4739997 timesteps\n",
"Best mean reward: 31.48 - Last mean reward per episode: 31.72 - Last mean moves per episode: 10.09\n",
"Saving new best model\n",
"4749985 timesteps\n",
"Best mean reward: 31.72 - Last mean reward per episode: 31.90 - Last mean moves per episode: 10.09\n",
"Saving new best model\n",
"4760000 timesteps\n",
"Best mean reward: 31.90 - Last mean reward per episode: 32.03 - Last mean moves per episode: 10.06\n",
"Saving new best model\n",
"4770000 timesteps\n",
"Best mean reward: 32.03 - Last mean reward per episode: 32.21 - Last mean moves per episode: 10.07\n",
"Saving new best model\n",
"4779984 timesteps\n",
"Best mean reward: 32.21 - Last mean reward per episode: 32.49 - Last mean moves per episode: 10.03\n",
"Saving new best model\n",
"4789994 timesteps\n",
"Best mean reward: 32.49 - Last mean reward per episode: 32.55 - Last mean moves per episode: 10.04\n",
"Saving new best model\n",
"4799989 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 32.55 - Last mean moves per episode: 10.05\n",
"4809991 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 32.41 - Last mean moves per episode: 10.11\n",
"4819996 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 32.37 - Last mean moves per episode: 10.11\n",
"4829995 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 32.11 - Last mean moves per episode: 10.14\n",
"4839997 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.91 - Last mean moves per episode: 10.17\n",
"4849999 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.97 - Last mean moves per episode: 10.14\n",
"4859999 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.86 - Last mean moves per episode: 10.15\n",
"4869987 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.81 - Last mean moves per episode: 10.13\n",
"4879995 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.67 - Last mean moves per episode: 10.13\n",
"4889975 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.58 - Last mean moves per episode: 10.14\n",
"4899995 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.52 - Last mean moves per episode: 10.13\n",
"4909994 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.54 - Last mean moves per episode: 10.11\n",
"4919990 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.71 - Last mean moves per episode: 10.07\n",
"4929993 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.98 - Last mean moves per episode: 10.04\n",
"4939995 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 32.25 - Last mean moves per episode: 9.99\n",
"4949986 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 31.44 - Last mean moves per episode: 10.06\n",
"4959993 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 30.47 - Last mean moves per episode: 10.18\n",
"4969993 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 29.53 - Last mean moves per episode: 10.32\n",
"4979998 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 28.58 - Last mean moves per episode: 10.43\n",
"4989997 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 27.57 - Last mean moves per episode: 10.60\n",
"4999991 timesteps\n",
"Best mean reward: 32.55 - Last mean reward per episode: 26.83 - Last mean moves per episode: 10.69\n"
]
}
],
"source": [
"clear_session()\n",
"\n",
"# ships -- keep only one kind for 5x5 grid\n",
"ships = {}\n",
"ships['cruiser'] = 3\n",
"\n",
"grid_size = 6\n",
"num_timesteps = 5000000 # this is number of moves and not number of episodes\n",
"\n",
"best_mean_reward, n_steps, step_interval, episode_interval = -np.inf, 0, 10000, 10000\n",
"\n",
"# Instantiate the env\n",
"env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size, ships=ships)\n",
"\n",
"\n",
"# wrap it\n",
"log_dir = \"./gym/\"\n",
"os.makedirs(log_dir, exist_ok=True)\n",
"env = Monitor(env, filename=log_dir, allow_early_resets=True)\n",
"env = DummyVecEnv([lambda: env])\n",
"\n",
"model = A2C('MlpPolicy', env, verbose=0, #learning_rate=0.00007,\n",
" ).learn(total_timesteps=num_timesteps, callback=callback)"
]
},
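{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `Best mean reward` / `Last mean reward per episode` lines above are produced by the monitoring callback. As a minimal sketch (not code from this notebook), the same numbers can be recomputed from the `Monitor` log written to `log_dir`, using stable-baselines' result helpers; the 100-episode averaging window below is an assumption."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: recompute the rolling mean episode reward from the Monitor log.\n",
"# Assumes stable-baselines v2; the 100-episode window is an assumption.\n",
"from stable_baselines.results_plotter import load_results, ts2xy\n",
"\n",
"x, y = ts2xy(load_results(log_dir), 'timesteps')  # x: timesteps at episode end, y: episode rewards\n",
"if len(y) > 0:\n",
"    print('Last mean reward per episode: {:.2f}'.format(np.mean(y[-100:])))"
]
},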
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# save manually \n",
"#model.save(log_dir + 'best_model_cruiser_10x10.pkl')"
]
},
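{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch (not part of the original notebook) of reloading a saved checkpoint and letting it play one episode on the vectorized env; the filename `best_model.pkl` is an assumption about what the callback writes, so adjust it to the actual checkpoint name."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: reload a checkpoint and play one episode on the DummyVecEnv.\n",
"# 'best_model.pkl' is an assumed filename -- use whatever the callback saved.\n",
"best_model = A2C.load(log_dir + 'best_model.pkl')\n",
"\n",
"obs = env.reset()\n",
"done, moves = [False], 0\n",
"while not done[0]:\n",
"    action, _states = best_model.predict(obs)  # sample an action from the learned policy\n",
"    obs, reward, done, info = env.step(action)\n",
"    moves += 1\n",
"print('episode finished in', moves, 'moves')"
]
},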
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAZQAAAEWCAYAAABBvWFzAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nO3dd5hU1fnA8e+7nbL0XpeOiGBZwYKKiICiEv1ZY2KJxq6xRAWNjUQl9hh7orFrTDTRiIqgYEEUsVAEQcoqCNI7Uvf9/XHP7N6ZnbrM7Mws7+d59tk757Zzp9z3nnPPPUdUFWOMMWZ35aQ7A8YYY2oHCyjGGGOSwgKKMcaYpLCAYowxJiksoBhjjEkKCyjGGGOSwgJKDRCRc0Tk4yRtq0REVETykrG9VBGRt0Xk7HTnI5OISB0R+Z+IrBeRf6U7P9UlIgNFZEk11x0iIv9Ndp5MaojIfSJyUbzL16qAIiIDROQT94NdIyKTReTAGs5DWk/4IlImIoPTsW8/VT1GVZ9JxbZFpIGIPCAiP4jIJhGZ7143S8X+kuhkoCXQVFVPCZ0pIreKyA53TOvcd/ngms9mSt0BjEl3JmIRkSdEZK6IlIvIOWHmXyUiP7lzzVMiUuibVyIiE0Vki4h8G/p7jLZuAvmbJCLnV+vgEnM3cKOIFMSzcK0JKCLSAHgT+CvQBGgL3AZsS2e+aqN0lo7cF/s9YG9gGNAAOARYDfSrxvZq8lg6AvNUdWeUZf6pqvWBZsBEIK0lmWS+P+7irqGqfpqsbca53+ocw3TgEuDLMNsbCowEjgJKgM5455qAl4CvgKbAjcC/RaR5nOtmFFVdBnwLnBDvCrXiDygF1kWZfw4wGbgfWAcsxDsRnQMsBlYAZ/uWbwg8C6wEvgf+AOS4eTnu9fduvWfxfigAPwAKbHJ/B7t9fAzcA6wFFgHHhOzrSWAZ8CPwJyDXzct1661yeb7UbT8vwnGWAYMjzDsO+Nod/ydAH9+8kcACYCMwGzgxwnu3xuUv1jFNAs73rR9t2U7Ah27fE4CHgecjHMP5wHKgfpTPWoGuvtdPA39y0wOBJcD1wE/Ac8Ac4Djf8nnu/d7fvT7IvV/r8E40A6Psey937OuAb4ATXPptwHZgh/tenBdm3Vv9xw30csfSPNZnCJwL/M+33HzgFd/rxcC+bvov7vUG4AvgsJA8/Bt43s0/H6jj3sO17rtxLbDEt871eN/bjcBc4KgI783NwN9D0noC4933ai5wqu89/wn3O3BpJwIzfL/BwHd2NfAK0MTNK3Hv23l4v8cPgbHA5SH7ngH8IsZ55WPgnJC0F4E7fK+PAn5y093xLmKLffM/Ai6KtW6YfRe5z2G1+7w/xyvh3g7sAra679JD0d5L32/gMTd/I/AB0NHNE7zf9gpgvXtfevvWvRH4R7T3qWLZeBbKhj+8K9XVwDPAMUDjkPnnADvxfni5eCfFH/BOXoXAEPdG13fLPwu8DhS7L+g83EkA+A3eD7YzUB94DXgu5MucF7LvHcBv3b4vBpYC4ub/F3gcqAe0AKYCF7p5F+FdIbTHK3lNDN1+yHGWESagAPu7L0x/l4ez3bKFbv4pQBu8H+ppwGagdch7dzneybZOHMc0ieCAEm3ZKXjBpgAYgHciixRQXgaeifFdiBVQdgJ/dp97HbwT3Qu+5YcD37rptnjfq2Pde3O0e908zH7z3ffiBncsg/C+Uz3c/FsjHVfofLf+GLzAlhfrM8T7Lq5zeWyNd7Hzo1uvM14wCFwQ/Qrv6jkPuAbvxF3ky8MO4BduW3VcPj7C+/61B2bhAgrQAy84tfF9/7tEOL5/Adf6Xtdz657r8rK/O9693fwFwNEh649001cCnwLt3PE/DrwU8ht81u2jDnAq8JlvW33d51gQ47sULqBMB07zvW7m9tcUL+jNCVn+IeCvsdYNs+8Lgf8Bdd3nfQDQIPT3Fed7+TTed/Fw9379BfjYzRuKd2HRCC+47IX77bv5JwFfxnUe3t0TeSb9uTfiabwr0J3AG0BLN+8c4Dvfsvu4D7KlL201sK/78LYBvUI+3Elu+j3gEt+8Hng/wjwiB5T5vtd13TKt8K44tgF1fPPPACa66fdxVzfu9ZDQ7Ye8B2WEDyiPAn8MSZsLHBFhO18DI3z5/yFkfsRjCv3Cxzj+Du6zquub/zyRA8p4YEyM70GsgLIddwJ1aV3xfmx13esXgJvd9PW4iwXf8uPwlWZ96YfhnZxzfGkvAbe66VsjHZdv/na8wLDLfR8HxvsZ4p1Q9gdOB57AuzDpiXeSeSPKftcCfX15+DBk/kJgmO/1BVQGlK54QW4wkB/jcxlP8Hf5NOCjkGUeB25x038CnnLTxXgXOR3d6zn4SkJ4QTT0N9jZN78Q78q9m3t9D/BItPy65cIFlAUh70e+218J8Gvg05DlbweejrVumH3/hpCaBN+8SQQHlFjv5dPAy7559d13rD3ehc88vFJhTph9HQ0sjPVeqWrtuYcCoKpzVPUcVW0H9Ma74n7At8hy3/TPbp3QtED9dQHeVV7A93hXq7jths7LwwsOkfzky+cWN1kfr149H1jmbsSuw/sitPDta3HIvqqjI3BNYB9uP+3d9hGRs0Tka9+83njvQ8DiqpuMeEzhRFq2DbDGlxZpXwGr8U4eu2Olqm715Wc+3gnqeBGpi1df/KKb3RE4JeR9GxAhD22Axapa7kvzf2/i8YqqNsL7Ls3CuyoNiPoZ4lVjDMS7Cv0A76RzhPv7ILAREblGROa4m8Lr8Kpco33WEb+D7r27Ei8QrRCRl0WkDeGtxQsM/uPpH3I8Z+JdaID3GZzkbloHrpK/9637H996c/BOkP7fYEWeVXUbXrXYr0QkB++i7bkI+YxlE16NSEBgemOYeYH5G+NYN9RzeBcvL4vIUhG5S0TyI+Qp1nsJwe/HJrwA20ZV38crRT0MLHcNEvx5LMa7yImpVgUUP1X9Fi8q967G6qvwrnY6+tI64NUTg1ddEzpvJ17A0gT3tRivhNJMVRu5vwaqurebvwzvpOHfV3UsBm737aORqtZV1ZdEpCPwN+AyvKJ3I7yTmfjWT/S44rUMaOJO5AHtIy2Md49lqIjUi7LMFrxSUECrkPnhjuUlvJPMCGC2O1GC9749F/K+1VPVcC2VlgLt3QkrwP+9iZuqrsIrFd8qIoHgFfEzdPMDAeUwN/0BIQFFRA7DK3Wdilct3Aiv3jzaZx31O6iqL6rqALzfhOJVJ4YzA+8eQ8Bi4IOQ46mvqhe77c7GC17HAL+kMsgH1j0mZN0iVfW/16HH8QzeSfYoYIuqTomQz1i+wasyC+gLLFfV1W5eZxEpDpn/TRzrBlHVHap6m6r2wrvfexxwVmB2yOJR30un4jMUkfp4VZhL3b4eVNUD8Bq7dMe7TxawF15VXUy1JqCISE935dXOvW6Pd4JIuEWJqu7Cu5q5XUSK3Qn3aryqGPBOPleJSCf3wdyB1zpnJ95N/HK8eut49rUMeBe41zWHzRGRLiJyhFvkF
eAKEWknIo3xbkTGki8iRb6/PLyAcZGI9BdPPREZ7r749fC+oCsBRORcqheIE+auOKfhnTgLXDPZ46Os8hzej+dV95nniEhTEblBRI51y3wN/FJEckVkGN4JNZaX8aoTLyb4xPU8XsllqNtekXjPYbQLs43P8KplrhORfBEZ6I7l5Tj2X4W7KBoHXOeSon2G4AWNI/GqT5fg3fcYhle3/5Vbphjv4mclkCciN1P1ijrUK8AoEWnsjvvywAwR6SEig1wpYiteKX9XhO28RfBn8SbQXUR+7d6vfBE5UET28i3zInAFXqnL3+LtMbzfZ0eXj+YiMiLaQbgAUg7cS4zSifsuFuEF2sDvKXC+fBY4T0R6ud/kH/AuXlHVeXjfv1vcOicCfYBXY60bJg9Hisg+IpKLd19xB5Xv7XKCzzHxvJfHivdoRQHwR7x7Sovdcv1d6Wcz3ufo/wyPAN6O9n5ViKdeLBv+8KoVXsG7Gtzs/j9O5U2sc3A3obSy7ldDtrEEGOCmG+OdTFbincBuJriV180ufaVbrrFvO6Nd+jq8esmgfbtlKur58aocHnX7X4/34z/dzcvDa4GxGq91VDytvDTkL3D/YBheS5F1eFed/8K1RsGr512DVzq7D+/kdH649y5Kmv+YJsVY379sF7yT30a8+1NPAE9G+awb4lVlLsarQljg8tzUzS/FuxLciHfieImQVl4Rtvse3sm2VUh6f/d+rHGf61igQ4Rt7O2WXU/V1nK3EudN+ZB9bwZaxPoM3fxl+Frk4AXrt32vc/FaFG5wy16H775bhDzUxTsRriOklRfeyXKqe6/X4J3Y2kQ5xs+B/r7XPdz7uRLvO/4+rjWam98BLwiMDdlODt5F3ly37wW41lOEuY/pW+8PhNxfiZDPSVT9HQ30zb8a76S+AfgHrnGLb/+T8ILrXELuaUZbN2S5M9z6m93yD1LZQONgvPsea4EHY72XBLfy2oTX8q2Tm3cUXulxE97v/wUqGye1xjsvRW28EPgLtLIxJmOIyD/xWlndku68mOQSkSF4DVp+kab9nwVcoF4V3R5DRJ7Guwj4Q4Lr3QssUNVH4lk+o7vvMHsG8R54W4NXAhuCdx8j45+mNolT1XfxqnhrnLtPdwkQ18nRgKpek8jyteYeislqrfCqCDbhFesvVtWvoq5hTALEe0J9JV7V0YsxFjfVZFVexhhjkiJjSyiuRc1XIvKme91JRD4Tke9E5J8SZ2dlxhhjakbGllBE5Gq81joNVPU4EXkFeE1VXxaRx4DpqvpotG00a9ZMS0pKaiC3xhhTe3zxxRerVLV5outl5E1519Z9OF5T1qtFRPC6B/ilW+QZvOaNUQNKSUkJ06ZNS2FOjTGm9hGRavXIkalVXg/gtY8PdGHRFK8n4UC330tIrDsLY4wxKZZxAUVEjgNWqOoX/uQwi4atqxORC0RkmohMW7lyZUryaIwxpqqMCyjAocAJIlKG12XFILwSSyOpHCinHa4PmlCq+oSqlqpqafPmCVcBGmOMqaaMCyiqOkpV26lqCV433O+r6pl444Cc7BY7G2+sEmOMMRki4wJKFNfj3aCfj3dP5ck058cYY4xPRrbyClDVSXhPUKOqC6nGmOHGGGNqRjaVUIwxxmQwCyjGGJPBfvvsNGYv3RB1mRUbtrJ+y44aylFkFlCMMSZDzFiyjpKRY1m0ajMAC1ZuYvzs5Rz74EdVll2xYStL1nojZ/e74z36jk5LJ85BMvoeijHGpNqGrTsoLszD65AjvU54aDIAR94zieH7tGbszGUV8+av2EiX5vU5/YlPuero7pz+RNXBaFds3EqL4qIay2+ojO3LKxlKS0vVul4xxvid9dRUTi1tx3F92rB4zRYOu2siR/dqiSqMOrYnXZrXZ9vOXezYpdQv9K65f1q/lab1C8jPzWHN5u00qRfcN+3WHbvoedM7AHx509Gs27KdQfd+AMCc0cP4dNFqzv3H5wAsuvPYsMHr7Kem8sG83XsYu3XDIqaMOmq3tgEgIl+oammi61kJxRiT8dZv2UHdwlzyc3MoL1e+/Wkjvdo0qLLcq18soWuL+vRt3yjitj6ct5IP563kyB4tOOyuiQCMn70cgAlzlvPtH4dVBIe/n1VKz9bFDPjzRHq0LGbEfm246525AJx1cEdGj+gNwIXPVXbssf8fxwftb6+b3wl6/cG8lZzzj88Ze8UA9m7TkK07dvHkx4t2O5gA9G7bcLe3sTssoBhjMpqqVtwfKBsznDvfnsPfPlrEvy46mO4tiuk7+l0mXH0EazZv55p/Ta9Y7s/vfMujkxZUvN64dQfrfDeu975lXNj9/W96ZScc5z9bWcMxd/nGimAC8OyU77lmSA8a1slPKBg8+fEiAKaVrWX4gx/HvV48hu7dKqnbS5QFFGNMxglURd1/Wl+G9Ao+Sf7tI++EPGH2ck55bAoAg+/7IGiZkpFjo76O5tp/z4h72W07djEgzA3zaD76bhUAt7zxTdTlDu3alMnzVye07a07diW0fLJZKy9jTMYJVEVd9c/pfOi7+v90YeUJdme50rddeqt4xs5cxpK1Pyd9u60bFvHC+QcxZ/QwysYMp19Jk6jLH9+3DUBSqs12hwUUY0zabdq2k5KRY7ng2aqNaC5+4cuKaX/Lpic/XsRRe7Xc7X1/+8dhdG5er1rrtmlUJ+h12Zjhu5WXSb8fSNmY4RU31usU5ALwykUHBy13y/G9KqbrFeRyx4nevZzAvaB0sYBijAlry/adbNsZvgrlkwWreGN62A6/q2Xlxm0AvDt7eULVU/eNn7fb+y7Kz+X9awYy6fcDE1737nFzI8671XfSBxhz0j4Rlz21tB1lY4ZT0ixyYGtcNx+AM/q159xDO7HozmO56bheTLnhqIrWaBcd0SWR7CedBRRjTFi9bh5Hv9vfCzvvl3/7jCte+or1P+/e09llqzazdN3PHHnPpN3aTiQvnt8/bHrvtlVbiPlP5vee0peCXO/0uOjOY5k9emjFvMG+UtH8FZsqpr+7/RgAXr34YJoXF3LagR2Ctt8pQrBoVr+AW0/YO9ah8OVNR/PJyEHceVIfAESE8wZ0okFRPiJCYV4O6X4MxG7KG2MiWv/zDm767yy27dzF+Yd15tkpZdx0XOWV9zc/rueQrs1Yv2UHX/6wloK8HA7t2izi9hIpfVTHuCsPZ+gDH1a8PqRrMyZcfUSVm/b/u2wAQJXnQb686WgAmtQr4Bf7tWXrjl2ICHUL8rh+WE+Wb9jKqk3bquz3ikFdyXcB6ICOTfj8xsEAzB49lKPu/YD3rjkiYnXUR9cNqqjaikZEqlSx+RXm5bBtZ3nE+TXBAooxpoqrX/m6Yvq5T73hxSfOXcnKjdt4/tMfKuat2rwdgOMf+pgf1njdgNx6fC8+nr+K7i2LuW5Yz4plr0+g9VRA2ZjhrNuynd88/Tlf/rAu5vI9WhUz4erDKS7Kp2Edr4qoa4v6APTr1IRnf9OPNZu3R3wq3v/AYm6OUK+w8hR58UCvOsn/3gT079w07PbqFuRV3A/p1bpqqQigKD85FUUb
tu5kwcpNsRdMIQsoxpgqXvvyxyppgfscfgtXbqpS6rj1f7MBmDBnBaeWtqdR3Xz++9WP/HPa4mrlpVHdAv5wXC9OeuSTirSDOzdlysLwTWq7tiiukua/WR7tKj8e4d6bjk3rxlwvUILxe/H8/knt8iXQJDld7B6KMYaSkWMpGTmWWT+uT2i9ByZ8F3X+wHsmse/o8RVBJpb57j4EBJcW9u/QOCgoXDusR8X0nVFudteUlg1i959VGKYk0sCVopKlQVF6ywgWUIwxFY7768fc927klkuplpebwztXHkZBXg6fhumT6srB3QDYu00DXji/P29ePoDWDdPXGWJAXk7sUkZhXtX7JAV5yTsFt2lYRLvGsUtKqWQBxZg9zNeL11FeHrk10IPvz0/p/h//9QFh0wNX1z1bNWDen44Je7K9cnB3ysYMpzAvl0O7NqN324Zs2Z7ep8MLcnPiqrYqDHM8yezfeOn6rcxeFn3clFSzgGJMlpu6aA0btsbXfPedWT/xi4cn89Rkr/uSid+u4JA7wzcNToV/nHtgxP6mvr55SLW2WZ7uprK58YWFcPdQivJjt+6K1/4dGtGzVTEPvf8dJSPH8uO65D/BH4sFFGOyxJK1W6o897F60zZOfXwKfW6Nb3Clpz/xAsmLU72WWuc+/TlL12+tVn78D+4N3qsFo0fszfkDOkVd58geLQCCnusA+Mvp+5ITR7VROJu27qzWeskSbwkpP0zgaV5cmLR8NKlXwLc/beSed72HPQ8d837Sth2vjAwoItJeRCaKyBwR+UZEfufSm4jIeBH5zv1vnO68GlNTBvx5In1vCw4cc3/aWDG9Y1c5qsotr8+iZORYnvmkjH9/sSRo+U8XrgG8ahr/Q3mJeu2SQ9ivQ+XP78T92nHWwSX84bheEbsf8afXLcijbMxwzj64IwADu7eodl5G7Nu22uvWpHDVYuGqwaor3D2ampapzYZ3Ateo6pciUgx8ISLjgXOA91R1jIiMBEYC16cxn8Yk3XF//YhVG7fz6Q1HsW7Ldka/OZuLfV1qlIwcy2HdmvHYrw7gl3//rCK9241vB20n0JvtSfu1peuNbzHuysMr5rVoUFTlYb9Qh3dvHtQxo9/+HYKv5Yb3aR3fwYW4bURvbnNjilRXnYJcbjth74hPomeyZDYZXvfz9qRtq7oyMqCo6jJgmZveKCJzgLbACGCgW+wZYBIWUEwtM+vHyhurj0xawGtf/ljl2YePvltFn9viq+Y69sGPKFc4+v7KJ8gjBQq/f5xzIHOWbaBL8/rMXrae/3vU6yr+/WuOiLnuhYd35vEPF8aVv2Q4+5CSGttXpkq0q/tUyMiA4iciJcB+wGdASxdsUNVlIlL9crIxGW7Ew5OZvjjy0+G7orTU8vvWVy0WTYviQlb4Hl7MzZGKEQD379CY8wZ04pTSdnRuXj/mtkIfHrz0yPR2WphMj565f1APyNUx89YhaW+dlgoZHVBEpD7wKnClqm6Ip3goIhcAFwB06NAhxtLGZK5owSSZmtQroH+nJpw3oBMnuwGrQolIUB9eAf+55BC+X72lSnqg25OGdfKZfkv1Wm9lqnhb1EVTXJRPcVFyH2r0e+uKw+JufZZMGXlTHkBE8vGCyQuq+ppLXi4ird381sCK0PVU9QlVLVXV0ubNm9dcho2J0/erN1MycizLNwS3rnpzxtK0tMz56LojefRXB/Czb7S//p2iD+gUsF+Hxvxiv6o3xY/r05prh/ZgyqhBSctnpkjFgFrJMOFq7x7Z5zcOplebBnRvWbULmlTLyIAiXlHkSWCOqt7nm/UGcLabPht4vabzZszuOuLuSQD0vyP4+Y/LXvyqxp8dmDN6WEUHiG191VRnHVyyW9vNy83h0iO7UrcgoytBquWKo7pRXJjHXSf3SXjdj647Mq57UNXRtUUxZWOGJ7UpcqIy9dM+FPg1MFNEAl173gCMAV4RkfOAH4BT0pQ/YxK2YesOGoSp5njy40Xs1armryYhuKdb/32PIXvv/kiItVV+bg4zbxtKebly3b9n8McRsccyCWjfJL1do6RaRgYUVf2YyL0SVO3gx5gMNG/5RloWF9F39Lv0K2nC1LI13PV/wVe1H8xbyR/fjK/jRPD6a6rug4jh+O9L+p/aDvdUtwmWkyO7PeRvbZORAcWY2mCIr5nu1DLvgcLrXg0eE+Tsp6YmtM33rhnIs1PK+PXBHalbkMd+o99l7ZbwN4k/v3EwB94+IeK2igvt52+Syy5DjEmBnbt2b+S8p84p5dqhlV20z7h1CGVjhlOnIJcLj+hScW/iq5uHhL1KPrN/B5oXFwYNgVu/MI97Tulb8XrjtqpdllwxqCunH9h+t/Ju9lx2iWJMCpSt3lyt9S4Z2IXebRsyqGdLBnZvwd3jvK7kw9178Zs8chANivLYx/XptcxVix3iG4531m1e/1m//9f0iNu5ekiPiPOMicUCijHVoKphu834YfUWOjSty+D7PgyzVmzXDu1Rsd2cHKF32wb8+qCOMddrG/Ig4SFdKoekLcjNYftulpiMiYdVeRmToJGvzqDTqLcY981PQemvf/0jh989kUH3Tqr2tkOD1JuXH8ZpB8b/gO6TZ5cCcHzfNhVp34weyrw/VY6EeEY/e+DXpIYFFGMS9PLn3tjoFz73RVD67172WrgvXJl4dVeyRu47aq+WlI0ZHjQkbX5uTtD2SztaJ90mNazKy5g06d22QUVHkA+evh/DeocfeCrZWmXAkLmmdrISijE1ILQl1lWDu3N8n8pqqXCDL6VK/05NOKNfBz64dmCN7dPsGayEYkwa/OqgDnznG+CqJh8kzMvN4c6T9qmx/Zk9h5VQjNkNJSPHsmHrDpatT6wPrqb1C4P6XErH+N/GJJuVUIzZTbHGcz+qZ/hhe/wBpeY7Gjcm+SygGJMCd560D6cf2J4la3+u6HTx/WuOYNC9lcPu+rs+qWfdoJhawKq8zB5tx65y3pn1E6rRRz9c//MO3puznB1xPiB4ygHtEBHaN6lLbo5X/ggd6dD/zElgGWOymV0WmT3aec9M48N5K7nr5D6cWhq5D6u+bvz2nnF2Mx8pQJxzSAn9wgxedXQv6y7eZD8roZg92ofzVgLw3pzlbNi6g4+/W1VlmZKRYyum4xmf/dIju4TtlgXg1hP25th9WldJz7MSiqkFLKAYA7RtVJc+t77Lr578jEWrNrNw5Saenrwo6jrhevn96LojuXZoz4T3HykAGZNNrMrLGMBfQDjhoY/ZuNXr2v3MGB0zlo0ZTsnIsZw/oBN/OK5XKrNoTMazgGIM8PePK0sjgWACMGfZhrDL+59st1H7jPFYlZcxUZzw0OSw6Ud0b56U7e/XoVFStmNMJrASitljxWoqHM2BJVVbalXHfy45NCnbMSYTZF0JRUSGichcEZkvIiPTnR+TvVZu3Fbtdfdu0zCJOTGmdsiqgCIiucDDwDFAL+AMEbE7oaZanppcFveyp4U8o7JX6/ieRzFmT5JVAQXoB8xX1YWquh14GRiR5jyZLPXJgqrPnETSu11wiWTL9l3Jzo4
xWS/bAkpbYLHv9RKXZkxUO3aVUzJyLCs2bq1Ia1gnP+7192sffPO8TcgY7saY7Aso4Z7+CrqzKiIXiMg0EZm2cuXKGsqWyXR73zIOgH63v1eRNnl+/CWUFsWFNKpbGYCs7y1jqsq2gLIE8FdmtwOW+hdQ1SdUtVRVS5s3T07TTpP9tu+s7NTx5Ec/YdLcFZS7S5FPRg7i+fP6M+qYnhFHTqxbmMelA7vWRFaNyVrZFlA+B7qJSCcRKQBOB95Ic55MBisvV7rc8FZQ2rTv13LOPz7nyB7eBUebRnUY0K0ZFx7RhSd+XVqx3J9+0RuA4qI86hfm0a1lcG/BxphgWfUciqruFJHLgHFALvCUqn6T5myZDDbt+7XsKg//vEmrhkVV0lo3qkw7pbQdv/J1vZKsZ0+Mqa2yKqAAqOpbwFsxFzQGOPXxKRHn/eerH6uk5eXkhJ0GKMrPBeCwbs2SlF0ZP+cAACAASURBVDtjapesCyjGJMvWHVUHy+raorJaK/S+e26O8Omoo2hcL/7WYcbsSSygmD1a52b1Is4L16V8uGoyY4wn227KG5NUjesVpDsLxtQaVkIxe7RZP66vkvbaJYewY2d8Y8cbYypZCcXsMY7p3apK2rYwgWP/Do3p37lpTWTJmFrFAorZI8y8dQjN6hemOxvG1GoWUMweobgon8Z1rXWWMalkAcXUWuUhDzT2bW+jIxqTShZQTK313KffB70e2KMFALef2Jsbju2ZjiwZU6tZKy9Ta93yRnCvPLk5QtmY4QB89cNaAApy7ZrKmGSxgGJqvauP7l4lbd/2jbhycDfO7N8xzBrGmOqwgGJqvcuOrNrtvIhw5eCqgcYYU31W3jdZa/ridZSMHMvqTduqzCsZObZiOkwPKsaYFLCAYrLWiIcnA3DAnyZUpP2wegsT564IWi5cn1zGmOSzKi9Tqxx+98R0Z8GYPZYFFJO1iovy2Lh1JwBL1/0cdnwTY0zNsYBistaZ/Tvy2AcLABh6/4ds3LazyjK3n9i7prNlzB7L7qGYrLWrvLJjx3DBBLBmwcbUIAsoJmstXbc16vwxJ+1TQzkxxoAFFJPFxs5cFnX+svXRA44xJrkyLqCIyN0i8q2IzBCR/4hII9+8USIyX0TmisjQdObTpF9J07pR50+Ys7yGcmKMgQwMKMB4oLeq9gHmAaMARKQXcDqwNzAMeEREctOWS5N2Zau3RJ3/+6E9aignxhjIwICiqu+qauAO66dAOzc9AnhZVbep6iJgPtAvHXk02aFegTViNKYmZVxACfEb4G033RZY7Ju3xKWZNCgZOTaoe5OaNubtb2MuU7Zqcw3kxBgTEFdAEZEuIlLopgeKyBX+exuJEpEJIjIrzN8I3zI3AjuBFwJJYTaloQkicoGITBORaStXrqxuFk2cAv1pLVi5qUb3G3j+5NxDS4LSA93TA2yK0JTYGJMa8ZZQXgV2iUhX4EmgE/BidXeqqoNVtXeYv9cBRORs4DjgTFUNBI0lQHvfZtoBS8Ns+wlVLVXV0ubNm1c3iyZOgf60jrr3gxrb585dlc+f7Cqvck1RoUOT6DftjTHJFW9AKXf3NU4EHlDVq4DWqciQiAwDrgdOUFX/Xdc3gNNFpFBEOgHdgKmpyIPJbA++P79i+tkp31eZ/7/LBtCqQRFH9mxRk9kyZo8Xb0DZISJnAGcDb7q0/NRkiYeAYmC8iHwtIo8BqOo3wCvAbOAd4FJV3ZWiPJgM9JcJ31EyciyrwnRX77dPu4Z8esNR5OZYL8PG1KR4A8q5wMHA7aq6yJUQnk9FhlS1q6q2V9V93d9Fvnm3q2oXVe2hqm9H245JnR2+Kqd4vDNrGT1vepvK2svquX/CPABe/OyHirTnzrOGfsZkirgCiqrOVtUrVPUl93qRqo5JbdZMpli1aRvPTimreL1wZfjWU51HhW/1ddHzX7J1Rznrf96R9Lz1aVvttiHGmCSL2lBfRGYSpiVVgHv40NRypW4Aq/aN63JkzxYU5oW/DolyfxzwukJpVLcgafk6tbQdDevmM2XUIA6+830m/n5g0rZtjElcrCe/jnP/L3X/n3P/zwSiP6Zsap2Xpv7AX977jpk/ro+4zK5yjXjv4se1P7NX6wZJy88lA72x4ls3rBPUXNgYkx5RA4qqfg8gIoeq6qG+WSNFZDIwOpWZM5ll3ZYdfL14XdRltu7YRb3Cyq/VF9+vrZj+cd3PSc1Pe2sWbExGifemfD0RGRB4ISKHAPVSkyWTqaZ9vybmMss3BPfw+3+PflIxvXQ3Akp5mPo0a8RlTGaJN6D8BnhYRMpEZBHwiEsze5Am9WLf//jyh8glmMc/XMiH81Yy8tUZCe970eqqDQFELKIYk0liBhQRyQG6qmpfoA8QaM77ZcpzZzJK20Z1wqaf0LdNxXRoCSXUWU9N5eXPFyfchPjRSQsSWt4YU/NiBhRVLQcuc9MbVDXyHVlTq01fEvzR33RcLwD+dGJvGtX1nnNdsja4rcaIfdsQzleL1/HOrJ/i3nfPVsWJZNUYkwbxVnmNF5Hfi0h7EWkS+Etpzkxa7CpXnptSFvTwYrP64au6+pU0oWzMcBoU5bNui/eMyUtTFwct07NV+FZdJz3yCRc9/wU/xTmqYreWFlCMyXTxDhgRuF9yqS9Ngc7JzY5Jp607dtHzpncAmL9iE7eN6A1E7oAxP6/yHkabhkUsDRMcNmyN/jDj8g1badWwKGbetu/0AlyX5vVYEOHBSmNMesX7pHynMH8WTGqZQDABeGbK9xWllMDJPFR+buXX56Ez9w+7TKx7H/H2txWoShveJ3wVmjEm/eIeYEtEeovIqSJyVuAvlRkz6ffenBUAbIsUUHIqvz77d2gcdpmT9os+Blo8VV5btu9kkRssa/BeLTiocxMmXH1EzPWMMTUrriovEbkFGAj0At4CjgE+Bp5NWc5M2l30/BdR5+fEcTmyzAWM+0/ry1X/nF5lfnFR7K9gr5vHVUyXK7x8wcGxd2yMqXHxllBOBo4CflLVc4G+QGHKcmWyQkGEPr38nUBOWbgagBP3a0fZmOHk5wZXcW1PsOfikqb2dLwxmSregPKzaz68U0QaACuwG/K11qMR7oeEWrs5/A33j76LPPTy5JGDgl5v3ZFYQGlQlKpheIwxuyvegDLNjSH/N+AL4EtstMSst2TtFv724cIq6cfsE99gnO2bhH/Qce3m7RHXaVEc3KJr287ExkjLsf5WjMlYcd1DUdVL3ORjIvIO0EBVE+8/w2SUAX+eCMCw3q2q1dFinfzcoNdXDe7O/RPmcdPr3/DZojW8OWNZzG3MWLKeQT1bULcg3hbsxphMFVcJRUSeFZHfikhPVS2zYJL9/F2f/LCm8un2/p3if141tC+tlZsqW2xFCyZ92jWsmH7iw4VBN91DhT55b4zJXPFWeT0NtAb+KiILRORVEfld6rJlUq1sdeWJ+ulPyiqqnj5bFLtH4UhGHrNXXMvddXIfiguDSySbtu0Mu2ygFGWMyXzxPtj4PnA7cBPwd6AUuDiF+TIpVDJyLMc9+FHF6/Gzl/PYJO
9eSqsG4Z9az4vj3kX9wviqrXq2asCXNx8dlLZw5aaY69kgWsZktnirvN4DJgOnAXOBA1W1Zyoz5voOUxFp5l6LiDwoIvNFZIaIxNcUyQQJVHVt3h58M/z+CfMA+Mn1FvzhtUcGzf/q5qNpUVzI/y4bQKJGHVP1q+J/yh6gcYyhgR84bd+E92uMqVnxVnnNALYDvfG6sO8tIuGb+CSBiLQHjgZ+8CUfA3RzfxcAj6Zq/7XZZS9+FXV+P3cPpUPI8x7FRflMvXEw+/juf8TruL6xu0s55i8foapMWbA6bNf2w/vE1/LMGJM+8VZ5XaWqhwMnAquBfwDRx4LdPfcD1+F1QBkwAnhWPZ8CjUTEzjIJGjszesurJ88urZhedOexdG5Wr0p1138vPZQpowaFrhpRpN6K/TZt20mnUW9xxt8+5W3Xrb3/vkpoicYYk3ni7XrlMuAw4ADge+Ap4KOoK1WTiJwA/Kiq00NaEbUF/H2jL3FpsdummrgV+x4cFBHe//3AKsvs275RQtsszMuNvZDPd8s3wT7wzCdlCa1njEmveBv/1wHuA75Q1fDNcRIgIhOAVmFm3QjcAAwJt1qYtCp1IyJyAV6VGB06dNiNXNYuo16bUeWhQoD3rjmC9+es4Pa35iRlP/UL8ypKFnXyc/l5R2IPLoJ3P+eigZ25e9zcpOTJGFMz4n2w8W4RGQD8GviHiDQH6qvqoursVFUHh0sXkX2ATkCgdNIO+FJE+uGVSNr7Fm8HLA2z7SeAJwBKS0sTG2e2ltqwdUeVga8C8nKE3x7emVYNiyL2KpyIL286mpGvzeCek/uyavM2vl8d+TmScVceztAHPgw7b/O2xAORMSa94m3ldQtwPTDKJeUDzyc7M6o6U1VbqGqJqpbgBZH9VfUn4A3gLNfa6yBgvapadVcc+tz6bsR5rRt6bSuO79uGkw9ot9v7KsjL4b5T9yUnR2hRXMSBJZEflOzRqjhiU+CdCXYaaYxJv3jvdJ4InABsBlDVpUBNj8n6FrAQmI/Xp9gl0Rc30ZzRzyvsReoxON38nUZeObhbGnNijIlXvPdQtquqiogCiEi9FOapgiulBKaV4CGITRjzV2yiXeM6FOVHvhF+9dHdueKobtx5Up8azFlitvo6jbxycPc05sQYE694L09fEZHH8Zrq/haYgPfEvMkgO3eVM/i+D4KG8g2nKD8zSyV+W6txM98Yk17xPodyD/Bv4FWgB3Czqj6YyoyZxK3aFL7b+H3aBj+MGK30kg5f/GEwr196aFCaf5AuY0x2iPtSVVXHq+q1qvp74H0ROTOF+TLVMO37yo4d+90+geUbtvLOrJ/oGPLUe6YFlKb1C+nbvhEL7ziWVy70hvf99ZM23I4x2SbqPRQ3OuOleA8QvgGMd6+vBb4GXkh1Bk38/N2qrNi4jf53vBd2uS0RevZNt5wcqTLGijEme8QqoTyHV8U1EzgfeBc4BRihqiNSnDeTgP9+9WPcy361OJW95uyejdusqsuYbBWrlVdnVd0HQET+DqwCOqjqxpTnzCTkyn9+HXV+u8Z1WLL2ZwB6t0m8g8eaEtp9/sgwPRUbYzJTrBJKxeWiqu4CFlkwyU5N61V20Di4V8s05iS6OgXBVV6HdGmappwYYxIVq4TSV0Q2uGkB6rjXgvdoSIOU5s4kzfQl6yum4xksK11yQoYVDm2hZozJXFEDiqraHdIscP/4eQktH+/IiumwPaQ/sdBx640xmSvzn3AzVazbsp2SkWNZsHITqspf3vuuYl60gaia1Is9Lkm6tW4YfghiY0zmy9xLVRPRvqPHA3DUvR/w28M6Bc27aXgvxs4I32fmu1cdzpQFq2mcwYElzzeQ1qQwY7EYYzKXlVCy3N8+qhxBoEfLYlo1LOKW43vx35AnzwGa1S/k+DiG4023o3q2AKCkWY10GWeMSRIrodQi4646HIBzD/VKLa9efDD/9+gUAP590cFpy1einjirlF3lNpSNMdnGSihZYv2WHZz6+BQ2bo3/wb8DOlaORVIaZVySTJObIxnbrb4xJjIroWSJvqO9QbL63R6+O5VIju/bhhbFhanIkjHGBLGAkqFe/OwHZi9bz59+sQ8zfc+QJDpG+1/P2C/ZWTPGmLCsXiFD3fCfmTz/6Q8AjPrPjJjLX3h451RnyRhjorISSgY59fEpTCtbw8O/3L8ibd2W7cz6cUOUtWD26KHULbCP0hiTXnYWSrOSkWMB+OyGo5i6yBvP5OIXvqyYH3jmJBoLJsaYTGBVXhki0tglkTSsk5+inBhjTPVkZEARkctFZK6IfCMid/nSR4nIfDdvaDrzmG7Rulgxxph0yLiAIiJHAiOAPqq6N3CPS+8FnA7sDQwDHhGRrO68clrZmpjLlIQM3xtw0eFdkp0dY4zZLZlY+X4xMEZVtwGo6gqXPgJ42aUvEpH5QD9gSnqyufsuf+mrmMuUrd4SNr1d4zpMv2UIP29PrBmxMcakSsaVUIDuwGEi8pmIfCAiB7r0tsBi33JLXFrW6him9DF7dOSavLd/d1jFdE6O0LBOPq2sd15jTIZISwlFRCYArcLMuhEvT42Bg4ADgVdEpDPeoF6hqnT4JCIXABcAdOjQIVlZjmr64nW0blhEiwbxn9w3bt3BpwurVnlFa7GVyeOYGGNMWkooqjpYVXuH+Xsdr+TxmnqmAuVAM5fe3reZdsDSMNt+QlVLVbW0efPmNXE4jHh4Mv2itNJ6a+YySkaOZeeuysGj9rn13YjLT79lCOOuPDwo7ZvbhlKUn9W3jIwxtVwmVnn9FxgEICLdgQJgFfAGcLqIFIpIJ6AbMDVtuXRUY/eKe4l7ruSbpdEfUDyjnxcvG9bJp0er4qB59QrzaFDHK6H0am0jLxtjMk8m1qE8BTwlIrOA7cDZ6p21vxGRV4DZwE7gUlVNyx1pVWXz9l3UL8xj/OzlFenrf94R9fmQzxatpm/7RkElFYDvbj+GJz5cyCUDo7fcKszLpWzM8N3LvDHGpEjGlVBUdbuq/spVge2vqu/75t2uql1UtYeqvp2uPD41uYzet4zj9a9/DBp+t+9t7/L2TG+0xF3lyowl64LWu+Otb5m9dANdb6zMettGdcjPzeHSI7va+OnGmKyWcQElG/zxzdkA/O7lr6tUYwW6Telyw1uc8NBk3puzPGj+sQ9+FPR68shBEffzLzco1vRbhux2no0xJtUyscqrVjnvmWkR553ZP3ortANLmlgVlzEma1gJJQUC1V6xtGlUJ8U5McaYmmMBJUHlcYx17u8tOJoT+rbZ3ewYY0zGsICSoAuf/yJp22rfJHw/XcYYk40soCTI30zY792rDg+bHs51w3rw+Y2Dk5UlY4zJCBZQqikvJ7iJb/eWxXF3Kf/enBU0Ly5MRbaMMSZtLKAkwP9U/Pw7jq0y/4HT9o247oCuzSqmh/RqmdyMGWNMBrCAEgdVZVe58s6sn6Iul58b+e187NcHVEyv3bIjaXkzxphMYc+hxOGRSQu4e9zciPNP2i98L/pDerXkXXfPJT+3sors+mE9kptBY4zJAFZCiUNoM
Cnt2Djo9Wtf/Vgx/fqlh1ZMn+F7cLHAV3qxLlaMMbWRlVCq4a+/3C/odXFR5dvYp13DiumDOjUF4Pi+bRARXr34kJrJoDHGpIEFlGpo3dB7wj0vR9hZrvRuUxlERIQz+rVnwpwV1CkI7h34gJCSjTHG1CZW5RWHI3uEH6jrkTP3B+Cqo7sHpd95Uh97zsQYs8exEkocJs5dWTH9wbUDK6aH7N2K6bcMiToGijHG7CksoCQgXM+/FkyMMcZjVV7GGGOSwgJKDKs3bUt3FowxJitYQInhlWlL0p0FY4zJChZQYli7ZTsA9iyiMcZEl3EBRUT2FZFPReRrEZkmIv1cuojIgyIyX0RmiMj+NZGfQHf1PVs1qIndGWNM1sq4gALcBdymqvsCN7vXAMcA3dzfBcCjNZGZRas2A1CQa0UUY4yJJhMDigKB4kBDYKmbHgE8q55PgUYiEt8AJElw5eDusRcyxpg9WCY+h3IlME5E7sELeIEOsNoCi33LLXFpy2oiU4d1axZ7IWOM2YOlJaCIyASgVZhZNwJHAVep6qsicirwJDAYCFfnpKEJInIBXpUYHTp0qLJCdeVFGevEGGNMmgKKqkbs6EpEngV+517+C/i7m14CtPct2o7K6jD/tp8AngAoLS2tEnASsat8t1Y3xpg9SiZedi8FjnDTg4Dv3PQbwFmutddBwHpVTWl11+btO1O5eWOMqVUy8R7Kb4G/iEgesBVXfQW8BRwLzAe2AOemOiNrNm1P9S6MMabWyLiAoqofAweESVfg0prMy8B7JtXk7owxJqtlYpVXxmlQlHFx1xhjMo4FlDj8zp5BMcaYmCygxKFtozrpzoIxxmQ8CyhxsIBijDGxWUCJQ8uGhenOgjHGZDwLKHFoXLcg3VkwxpiMZwElDvnW7YoxxsRkZ0pjjDFJYQHFGGNMUtgTe1E0qVfA0L1bpjsbxhiTFayEEsWOXeUU5uWmOxvGGJMVLKBEsX1nOYV59hYZY0w87GwZgaqybWe5tfAyxpg42dkygrLVWwB4avKiNOfEGGOygwWUCOb+tBGALdt3pTknxhiTHSygRNCmUREAlw/qmuacGGNMdrCAEsGPa38GoGk963bFGGPiYQElgoWrNgPQvLgozTkxxpjsYAElgm/dPRSRNGfEGGOyhAWUCA7t0hSAvu0bpTknxhiTHdISUETkFBH5RkTKRaQ0ZN4oEZkvInNFZKgvfZhLmy8iI1Odx5GvzQSwBxuNMSZO6TpbzgJOAj70J4pIL+B0YG9gGPCIiOSKSC7wMHAM0As4wy2bcvk5FlCMMSYeaekcUlXnAEjVGxQjgJdVdRuwSETmA/3cvPmqutCt97Jbdnaq89qgjvWfaYwx8ci0y++2wGLf6yUuLVJ6yoUJesYYY8JI2eW3iEwAWoWZdaOqvh5ptTBpSvjApxH2ewFwAUCHDh3iyGmYDWvYTRtjjIkiZQFFVQdXY7UlQHvf63bAUjcdKT10v08ATwCUlpZWKzL8sGZLdVYzxpg9WqZVeb0BnC4ihSLSCegGTAU+B7qJSCcRKcC7cf9GqjLRumEdcgQePGO/VO3CGGNqnbTccRaRE4G/As2BsSLytaoOVdVvROQVvJvtO4FLVXWXW+cyYByQCzylqt+kKn8FeTksvHN4qjZvjDG1ktTm+wWlpaU6bdq0dGfDGGOyioh8oaqlsZcMlmlVXsYYY7KUBRRjjDFJYQHFGGNMUlhAMcYYkxQWUIwxxiSFBRRjjDFJYQHFGGNMUtTq51BEZCXwfTVWbQasSnJ2Mp0d855jTzxuO+bEdFTV5omuVKsDSnWJyLTqPNSTzeyY9xx74nHbMdcMq/IyxhiTFBZQjDHGJIUFlPCeSHcG0sCOec+xJx63HXMNsHsoxhhjksJKKMYYY5LCAooxxpiksIASQkSGichcEZkvIiPTnZ9wROQpEVkhIrN8aU1EZLyIfOf+N3bpIiIPuuOZISL7+9Y52y3/nYic7Us/QERmunUeFBGp7j6SeMztRWSiiMwRkW9E5He1/bhFpEhEporIdHfMt7n0TiLymcvPP90opriRTv/p8vOZiJT4tjXKpc8VkaG+9LDf9+rsI8nHnisiX4nIm3vQMZe579/XIjLNpWXX91tV7c/94Y0GuQDoDBQA04Fe6c5XmHweDuwPzPKl3QWMdNMjgT+76WOBtwEBDgI+c+lNgIXuf2M33djNmwoc7NZ5GzimOvtI8jG3BvZ308XAPKBXbT5ut936bjof+Mzt5xXgdJf+GHCxm74EeMxNnw780033ct/lQqCT+47nRvu+J7qPFHzeVwMvAm9WJz9ZesxlQLOQtKz6fqf95JhJf+7NHud7PQoYle58RchrCcEBZS7Q2k23Bua66ceBM0KXA84AHvelP+7SWgPf+tIrlkt0Hyk+/teBo/eU4wbqAl8C/fGefs4L/c7iDZF9sJvOc8tJ6Pc4sFyk77tbJ6F9JPlY2wHvAYOAN6uTn2w7ZrftMqoGlKz6fluVV7C2wGLf6yUuLRu0VNVlAO5/C5ce6ZiipS8Jk16dfaSEq3LYD++KvVYft6v6+RpYAYzHu7pep6o7w+yzIj9u/nqgaZR8RkpvWo19JNMDwHVAuXtdnfxk2zEDKPCuiHwhIhe4tKz6fufFOMA9jYRJy/Z21ZGOKdH06uwj6USkPvAqcKWqbnDVwInkKauOW1V3AfuKSCPgP8BeUfaZ6LGFu6CM9V6k9JhF5Dhghap+ISID49hn1h+zz6GqulREWgDjReTbKMtm5PfbSijBlgDtfa/bAUvTlJdELReR1gDu/wqXHumYoqW3C5NenX0klYjk4wWTF1T1tWrmKeuOG0BV1wGT8OqyG4lI4GLQv8+K/Lj5DYE1UfIZKX1VNfaRLIcCJ4hIGfAyXrXXA9XITzYdMwCqutT9X4F38dCPLPt+W0AJ9jnQzbX2KMC7AfdGmvMUrzeAQIuOs/HuMQTSz3ItNg4C1rti7ThgiIg0dq06huDVGS8DNorIQa4VyFkh20pkH0nj8vIkMEdV79sTjltEmruSCSJSBxgMzAEmAidHyE8gnycD76tX+f0GcLprrdQJ6IZ3gzbs992tk+g+kkJVR6lqO1Utcfl5X1XPrM3HDCAi9USkODCN972cRbZ9v5N9Yynb//BaNszDq6u+Md35iZDHl4BlwA68q4jz8Op03wO+c/+buGUFeNgdz0yg1Led3wDz3d+5vvRS92VeADxEZY8KCe8jicc8AK+4PQP42v0dW5uPG+gDfOWOeRZws0vvjHdynA/8Cyh06UXu9Xw3v7NvWze6fM7Fte6J9n2vzj5S8JkPpLKVV60+Zrfv6e7vm0C+su37bV2vGGOMSQqr8jLGGJMUFlCMMcYkhQUUY4wxSWEBxRhjTFJYQDHGGJMUFlBMVhARFZF7fa9/LyK3JmnbT4vIybGX3O39nCJeb8kTfWn7iNe77NciskZEFrnpCSLSRkT+ncL87Csix6Zq+2bPYwHFZIttwEki0izdGfETkdwEFj8PuERVjwwkqOpMVd1XVffFe5DsWvd6sKouVdVUBrp9
8Z7JMCYpLKCYbLETb4zsq0JnhJYwRGST+z9QRD4QkVdEZJ6IjBGRM8UbY2SmiHTxbWawiHzkljvOrZ8rIneLyOfijQdxoW+7E0XkRbwHvkLzc4bb/iwR+bNLuxnv4czHROTueA5YRErEjXkjIueIyH9F5H+uFHOZiFwt3pghn4pIE7dcFxF5R7wOBj8SkZ4u/RSXn+ki8qF7Snw0cJorEZ3mntZ+yh3vVyIywrfv191254rILS69noiMdducJSKnxXNcpvayziFNNnkYmCEidyWwTl+8DhXX4I0N8XdV7SfeAF2XA1e65UqAI4AuwEQR6YrXPcV6VT1QRAqBySLyrlu+H9BbVRf5dyYibYA/AwcAa/F6j/2Fqo4WkUHA71V1WsJH7umN18tyEd5T0Ner6n4icr/L6wN4QfciVf1ORPoDj+D1h3UzMFRVfxSRRqq63QW5UlW9zOX9DrxuRX4jXpcvU0Vkgv94gS3A5yIyFugILFXV4W79htU8LlNLWAnFZA1V3QA8C1yRwGqfq+oyVd2G14VEICDMxAsiAa+oarmqfocXeHri9YN0lnjdx3+G10VFN7f81NBg4hwITFLVlep1df4C3oBoyTBRVTeq6kq8LtT/5z8W8XpiPgT4l8vz43jjWwBMBp4Wkd/iDTIVzhBgpFt3El7g6uDmjVfV1ar6M/AaXmlrJl7J7s8icpiqrk/ScZosZSUUk20ewBto6h++tJ24iyPX8V2BdXiPewAAAaBJREFUb94233S573U5wd//0D6IAl1+X66q4/wzxOtWfXOE/EXsTz8JYh1LDt6YHvuGrqiqF7kSy3DgaxGpsgxe3v9PVecGJXrrVXl/VHWeiByAdx/mThF5V1VHV+fATO1gJRSTVVR1Dd5Qref5ksvwqpgARuANl5uoU0Qkx91X6YzXoeA44GLxus1HRLqL1xNsNJ8BR4hIM3fD/gzgg2rkJ2GuBLdIRE6BijHB+7rpLqr6marejNdVe3tgI95wygHjgMtdUEZE9vPNO1q8scfrAL/Aq/5rA2xR1eeBe/CGpTZ7MAsoJhvdC/hbe/0N7yQ+FW+I3Eilh2jm4p3438a7B7EV+DswG/jS3Rx/nBilevW69x6F1xX6dOBLVX092jpJdiZwnogEeq0d4dLvDjQUAD50eZsI9ArclAf+iBeMZ7jl/ujb7sfAc3i9PL/q7gPtg3ef5Wu8nn3/lPrDM5nMehs2xkQlIufgu3lvTCRWQjHGGJMUVkIxxhiTFFZCMcYYkxQWUIwxxiSFBRRjjDFJYQHFGGNMUlhAMcYYkxT/D2s/YetDuwbdAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY8AAAEWCAYAAACe8xtsAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nO3dd3gVVfrA8e+b0HtHeqRYAGkGEGzYaT9xXRvrWlld27p2sa5iWdS1rKu7rr2sYkNdV1RERBELEDoCAtKLEHovgff3xzlJJjf3Jvcm9+amvJ/nyZOZM2dmztxM7jvnzJkzoqoYY4wxsUhJdgGMMcaUPRY8jDHGxMyChzHGmJhZ8DDGGBMzCx7GGGNiZsHDGGNMzCx4lCARuVREJsVpW2kioiJSKR7bSxQR+UxELkl2OUoTEakuIv8Tka0i8l6yy1McIlJVROaJyCHJLospnIh0EZHv47Gtchk8ROQ4Efne/3NuEpHvRKRnCZchqV/uIrJMRE5Nxr6DVHWAqr6WiG2LSB0ReUpEVojIDhFZ7OcbJWJ/cXQO0BRoqKrnhi4Ukfv8uXN9SPoNPv2+EipnNK4EJqrqr8kuSEFE5Dz/nbBLRL4Os7ybiEzzy6eJSLfAMhGRR0Rko/95VEQkmnVjKF/cLiwLoqqzgS0i8n/F3Va5Cx4iUgf4BPgH0ABoAdwP7E1mucqjZNZ6RKQKMB7oBPQH6gB9gY1AryJsrySPpQ2wUFWzCsizEAitsV3s00uTPwJvlOQO/Zd5rN9dm4CngJFhtlcF+C/wH6A+8BrwX58OLkCeBXQFugCDcccdzbql0Zv48heLqparHyAd2FLA8kuB74AngS3AEtyXzqXASmA9cEkgf13gdSATWA7cDaT4ZSl+frlf73Wgrl+2AlBgh//p4/cxCfgbsBlYCgwI2ddLwFpgNfAgkOqXpfr1NvgyX+u3XynCcS4DTo2wbDAw0x//90CXwLLhwC/AdmAe8JsIn90mX77Cjulr4A+B9QvKeygw0e/7S+BZ4D8RjuEPwDqgVgF/awXaB+ZfBR700/2AVcDtwK+4L8D5wOBA/kr+8+7h54/xn9cWYBbQr4B9H+mPfQvwE3CmT78f2Afs9+fFsDDr3of7MpoPdPJpnfz8f4D7AnmvABb7v8fHQHOf/hzwt5Dt/he4yU83B0bjzuulwPWBfL2ADGCb/4yfiHCMrYHdBM5BoKr/+67w6z4HVPfLivz5+s/yIdz5txu4FZgWUp6bgY8K+X74A/B1SNrpuP83CaStAPr76e+BKwPLhgE/RrNuhO+fJbhzfClwoT9X9gAH/DmxJYrPsh/u/L3Tf4bLgAsD+xmI+//d7st3S2BZC/8ZVo31+zXPsRRn5dL4g7sC3Yi7AhgA1A/zx8sCLsN9IT/o/zjP+j/W6f4Dr+Xzv477p6sNpOGu/Ib5ZZfj/nHbArWAD4A3/LI0Qr7c/b734/7hU4GrgTXZJx7wEfBvoCbQBJgC/NEvuwpYALTC1agmhG4/5DiXESZ4AD1wga63L8MlPm9Vv/xc3BdLCnA+sBNoFvLZ/Qn3j189imP6mrzBo6C8P+D+WaoAx+G+vCIFj7eB1wo5FwoLHlnAI/7vXh24F3gzkH8QsCDwD7cR90+ZApzm5xuH2W9lf17c6Y/lZNw5dbhffl+k4wou9+s/4tMeBe4gEDz8djf4v2lVXG17ol92Au5iKPuzrY/7wsj+207zx1sFd/4uAc4I/B0u8tO1gGMilHMQ8FNI2lO4INYA9z/zP+CvflmRP19/Hq3ABdFK/ng3AUcGtjcD+G0h50S44HEj8FlI2ifAzX56K9A7sCwd2B7NuiHpNXHndPZ50Izci4NLgUkxfJb9cOfvE/6zOBH3v5q97bXA8YG/fY+QbW8jcNFYpO/a4qxcWn9wkfxVXGTO8n+ApoE/0qJA3qNwXzJNA2kbgW64L7i9QMfAsj9mn3y4ZpNrAssOx305ViJy8FgcmK/h8xyCawPfi7+y8MuHAhP89FfAVYFlp4duP+QzWEb44PEv4IGQtJ+BEyNsZyYwJFD+FSHLIx6Tn/+avMEj0vG39n+rGoHl/yFy8BgHjCzkPCgseOwDqgWWt8d9ydfw828C9/rp2/EXBoH8YwnUUgPpx+NqMymBtFHkfunfF+m4gsv9Z7ICF4xW4C4cgsHjJeDRwHq1/PmXBohf5wS/7ArgKz/dO8zf8Q7gFT89EVdDalTI53sh/grczwvuC6xdIK0PsLS4n68/j0aEOZcf8tOdcLXZAq+mCR887gHeDkl7M/A5HwCOCCzr4M8tKWzdkPSauFrVbwn8nwf+NyYF5gv7LPvh/l9qBpa/C9zjp1fgvqvqRPgcVmefG0X9KXf3PABUdb6qXqqqLYHOuKutpwJZ1gWmd/t1QtNqAY1wV2bLA8uW466S8NsNXVYJFwgiybmxqKq7/GQtXDt4ZWCtiGwRkS24WkiTwL5WhuyrKNoAN2fvw++nld8+InKxiMwMLOuM+xyyrcy/yYjHFE6kvM2BTYG0SPvKthF35VYcmaq6J1Cexbimlf8TkRrAmcBbfnEb4NyQz+24CGVoDqxU1YOBtOB5ExVVXYGrwTyMu+AJ/TzynH+qugP3ubRQ9w3xNu4CBOB3uC+17GNpHnIsd5J73g4DDgMWiMhUERkcoYibcVfE2RrjLgimBbb7uU+Px+cbevyvAb/zN68vAt5V1aLc29yBa7EIqoMLdOGW1wF2+M+4sHVzqOpOXG3+Ktz/+RgROSJCmQr8LL3NfpvZluP/j3EBaiCwXES+EZE+IduvjQtkRVYug0eQqi7AXXF2LsLqG3BXcm0Caa1xURtck0vosixccNIY97USV/NopKr1/E8dVe3kl6/FfckH91UUK3FXa/UCPzVUdZSItAFeAK7D9QSqB8zFXQVli/W4orUWaOC/VLK1ipQZd0/kDBGpWUCeXbh/wGyh3UnDHcso3BfuEGCe/8ID97m9EfK51VTVfDdgcedFq5CbusHzJhav49ryX4+wn5zzz38WDQP7GQWc4/+uvXH3OLKPZWnIsdRW1YEAqrpIVYfiLlweAd6P8DnPBtoGOhtswF14dQpst66qBi8kivP55vl7qeqPuNrj8bjgWNQb9z8BXYI9qHA3xn8KLO8aWNY1ZFlB6+ahqmNV9TRcUFyA+3+D/OdiNJ9l/ZC/S2vcOYGqTlXVIbi/4Ue4WgkAItIcd1H8c7gyRqvcBQ8ROUJEbhaRln6+Fe5k/THWbanqAdyH/pCI1Pb/hDfhmg7A/SPcKCKHikgt3BXiO+p60WQCB3HtydHsay3wBfC474KaIiLtROREn+Vd4HoRaSki9XE3tgtTWUSqBX4q4U7Wq0Skt++1UlNEBolIbVy1Wn3ZEZHLKFrQjZmqLsfdpL1PRKr4K6WCuhO+gfvCGe3/5iki0lBE7hSRgT7PTNyVaaqI9Me1CxfmbVyT4NXkXhWD+5v/n4ic4bdXTUT6ZZ9nISbjmhxuE5HKItLPH8vbUew/1Du+PO+GWfYWcJnvK
loVd/5NVtVlAKo6A/e3fBEYq6rZV5pTgG0icru4Z05SRaSz+O7sIvJ7EWnsa07Z6xwI3bmqrgIW4Xu3+fwvAE+KSBO/rRYickZgtXh8vkGvA88AWaoasatr9jZxLQMpfvuV/eKv/fFdL+65let8+leBfdzkj6U5Lpi/GuW6wTI0FZEz/Rf+XlytJftzXQe0zO6lFeVnCXC//385HtcR5j0/f6GI1FXV/bj7G8G/Xz9cE2axeqCWu+CBqy72BiaLyE5c0JiL+4MXxZ9wXwRLcD2F3gJe9stexn2JTcT1nNjj82c3yTwEfOerncdEsa+LcVcE83BNAu+TW21/AdcGPAuYjrs5X5hPcVcv2T/3qWoGrv37Gb+Pxbj2VlR1HvA47obpOtz9oO+i2E+8XIhr192I68jwDhG6WPsT/1Tc1ds43D/IFFwT22Sf7c+4L+0tftsfFVYAH8R/wPXAeyeQvhJ3tXwn7gt5Ja7HT77/IVXdh2uSGYC7gvwncLGvBcdEVXer6pequjvMsvG4NvfRuJpbO+CCkGyjcJ/TW4H1DuA+l26483YDLsDU9Vn6Az+JyA7g78AFwea9EP/GNRllux13Tv0oIttwNcTDA/su9ucb4g3cBU5htY6LcP8D/8LVVHbjr/r93+ss3P/fFlxHmLN8evYx/g+Yg/suGePTolk3KAX3PbQGd7P/ROAav+wrXG3lVxHZ4NMK/CxxTcCb/fbexN0TzT7HLgKW+fWuAn4fWO9CXM+tYsnuiWFMqSMi7+B64/wl2WUx4fkazwzgFB8YSnr/1XG9B3uo6qKS3n+y+Nrsf/x93VjWOwp4XlVD74HErDzWPEwZJSI9fVNdim9mGkIUtQWTPKq6V1U7JiNweFcDUytS4CgOVZ0Tj8ABrv3PmNLiEFxzXENcN+urfbu9MfmIyDJcZ46zklyUCsmarYwxxsTMmq2MMcbErFw1WzVq1EjT0tKSXQxjjCkzpk2btkFVGxeeM69yFTzS0tLIyMhIdjGMMabMEJEijVaRsODhH857HXcT9CCue9jffffL7L7K9XAjSOYb/97fDNuOe7glS1XTE1VWY4wxsUlkzSMLN7LkdP/08jQRGaeq52dnEJHHcSNWRnKSqm4oYLkxxpgkSFjw8P2+1/rp7SIyHzcw3DxwL3QBzsMNK22MMaYMKZHeViKSBnQnd9gIcEMErCvg4R4FvhD3WscrC9j2lSKSISIZmZmZ8SqyMcaYAiQ8ePgBA0cDN6jqtsCiobhxdyI5VlV74MYHulZETgiXSVWfV9V0VU1v3DjmDgPGGGOKIKHBw49aORr39rAPAumVgLMJDIwWSlWzhxZeD3xIEd5LbYwxJjESFjz8PY2XgPmq+kTI4lNxA96tirBuTX+TPfsdBafjRrM0xhhTCiSy5nEsbljgk8W9mW5m4D0LFxDSZCUizUXkUz/bFJgkIrNww2yPUdXPE1XQGSs2c9eHcxK1eWOMKXcS2dtqEnnfQBdcdmmYtDW41yaiqkvI++auhNmbdYDf/PN7AC7s3YaOzUPfKGmMMSZUhR/bqmql1JzpVZt3FZDTGGNMtgofPIJ+XLIp2UUwxpgywYJHwMvfLU12EYwxpkyw4AF8c2u/ZBfBGGPKFAseQMv6NZJdBGOMKVMseACpKa5TWPsmtZJcEmOMKRsseAQsXr8j2UUwxpgywYJHiEXrtie7CMYYU+pZ8AixduueZBfBGGNKPQseIS5+eUqyi2CMMaWeBQ/v8XNzR0PZs/9AEktijDGlnwUPr23jmjnTG3bsTWJJjDGm9LPg4XVvXT9n+rhHJiSxJMYYU/pZ8DDGGBMzCx4BU+46JdlFMMaYMsGCR0CT2tWSXQRjjCkTLHgYY4yJmQUPY4wxMbPgEcHuffashzHGRJKw4CEirURkgojMF5GfROTPPv0+EVktIjP9z8AI6/cXkZ9FZLGIDE9UOUMNPOoQAF79fllJ7dIYY8qcRNY8soCbVfVI4BjgWhHp6Jc9qard/M+noSuKSCrwLDAA6AgMDaybUMd3aAzAI58vKIndGWNMmZSw4KGqa1V1up/eDswHWkS5ei9gsaouUdV9wNvAkMSUNK9RU1bkTKtqSezSGGPKnBK55yEiaUB3YLJPuk5EZovIyyJSP8wqLYCVgflVRAg8InKliGSISEZmZmaxy7pt9/6c6dmrthZ7e8YYUx4lPHiISC1gNHCDqm4D/gW0A7oBa4HHw60WJi1sNUBVn1fVdFVNb9y4cbHLe37P1jnTQ579rtjbM8aY8iihwUNEKuMCx5uq+gGAqq5T1QOqehB4AddEFWoV0Cow3xJYk8iyZru6X7uS2I0xxpRpiextJcBLwHxVfSKQ3iyQ7TfA3DCrTwU6iMihIlIFuAD4OFFlDfXJn44rqV0ZY0yZVCmB2z4WuAiYIyIzfdqduJ5T3XDNUMuAPwKISHPgRVUdqKpZInIdMBZIBV5W1Z8SWNY8OreomzOtqrg4aIwxJlvCgoeqTiL8vYt8XXN9/jXAwMD8p5HylqSPZq7mN91bJrsYxhhTqtgT5oW48Z1ZyS6CMcaUOhY8IhjcpVnhmYwxpoKy4BHB0xd0T3YRjDGm1LLgEUFKSu7tmhe/XZLEkhhjTOljwSMKD46Zj6ry7tSV7Nlvo+0aY4wFjyiNmrKS20bP5k+jZiS7KMYYk3QWPAqw9K+u53CLetV5cZJruho3b10yi2SMMaWCBY8CZD8cuHrLbvbYy6GMMSaHBY8ordm6B4B6NSonuSTGGJN8FjxitGXX/sIzGWNMOWfBI0ZDe7UqPJMxxpRzFjwK0bVl3Tzzo6aszDP/5uTlDP7HtyVZJGOMSbpEjqpbLswKeZtgi3rV88zf9WG4EeVz7cs6yIYde2kesp4xxpRlVvOIUbfW9XKmg+84/2D6Khb8uo2tu/PeE7ng+R/oO/IrfsncUegDhtv37OcPr03lV39z3hhjSiureRSied1qOT2tAL75Ofc96fd9nPuKkZvezR19d9nIQQwfPZv6NaswfcUWAE55/JucZZHc+9+f+HL+er6cP77AfMYYk2xW8yjE8IFH5pnfsTcrZ/r1H5eHXefgQeXtqSv519e/xLSvD2esjr2AxhiTBBY8CnFm1+b50rKbqwKtVnm0vTPyO6xCm6527zvArn1ZEXLn6v3wl6QNH1NoPmOMKQkWPGJQ3z8guDfrYJG3sXrL7jzzR977OR3vHcu6bZHvc6zctIt12/YWuN3vF2/ggU/m8c7UFUUumzHGRMvuecSgRf3qbN61n9VbdtOuca2o16teOZXdvsaxbuuesOv+7oUf88x/v3gDB1S5+OUp3Dkgt+ns4EHNM1x8zvovTs6ZPi+9lb133RiTUAmreYhIKxGZICLzReQnEfmzT39MRBaIyGwR+VBE6kVYf5mIzBGRmSKSkahyRuPLm07grSt6M3f1NgAuf3Uql70yJer1Hz2nC/cO7gjALxt2kjZ8DC9PWponzy+ZO/PMvz11JRe9NAVVeOjT+TnpG3YUXAMBWLhuR9RlM8aYokhks1UWcLOqHgkcA1wrIh2BcUBn
Ve0CLATuKGAbJ6lqN1VNT2A5C9W+SW36tmtEzSqpACzfuIsJgV5Xn99wfIHrD+7SjLRGNQC45yP3XMiIT+aRdSBy81cNv69Q67fnDx77Q7bz4rdLyDpw0EYANsYkTMKCh6quVdXpfno7MB9ooapfqGr2HeIfgZaJKkO8Hdq4Ztj0Iw6pk69rbbdW9Vg2chDLRg5CRPhl/c5861395vSI+6pbPfwAjJlhah6h90vem7aK9nd9xhWvZ/D9Lxsi7sMYY4qqRG6Yi0ga0B2YHLLocuCzCKsp8IWITBORKwvY9pUikiEiGZmZmZGyxUXbRgXf56gUuBdRu1re20lHNquTL3+4msGrl/UE4IhmtcPuI9PXPDbv3Efm9r3MWLGZ4x6ZELFMV/8ncoAyxpiiSvgNcxGpBYwGblDVbYH0u3BNW29GWPVYVV0jIk2AcSKyQFUnhmZS1eeB5wHS09MjdJ6NjwY1q+RLCw6UuPDBATnddM/q1iJPvjD3uMPKDlBvh4yhle2292dz2/uzo9sY5Hvi3Rhj4iGhNQ8RqYwLHG+q6geB9EuAwcCFquGfllDVNf73euBDoFciyxqNM7vlfeYjrWEN7hrUMWc+JUUY4vOc3SNv8OjbvlFU+6hf0zVXTV66qThFNcaYhEpYzUNcX9GXgPmq+kQgvT9wO3Ciqu6KsG5NIEVVt/vp04ERiSprtHq0rp8zHWn4kL9f0J2/X9C9SNv/6uYTqV45/I3yomobcp9mb9YBtu7aT5M61eK6H2NMxZLImsexwEXAyb677UwRGQg8A9TGNUXNFJHnAESkuYhkP5rdFJgkIrOAKcAYVf08gWVNqhFDOpGaIrRtXItKqXn/JKcc0STq7Sx4oH++tONCajy3vjebXg+P53+z1hStsMYYQwJrHqo6CQjX0h927A7fTDXQTy8BuiaqbMUxtFcrGteO71X7xX3SuLhPWthlnVrUpV6NKsxbu435a7eFzZOtWphay+s/LGfEkM458x/7oPGnUTP4vzBDrxhjTDRseJIY/fXsLtx02mHF2sYT53Vl7v1nRJV32LGH8vh5XRncpVmR9zd89GzSho8JO66WMcYUhQ1PUoKWPDyQA6pU9k1TL12STs2q+f8EXVvVY9ZKN5R7XT+e1rUnteexsT8Xab9vT3U9t0Jvwi/dsJOOzfN3ITbGmMJYzaMEpaRITuAAOOXIphzTtmG+fKOv6hN2/S9vOjHitsffHHlZtuveyvvMx94sq3kYY4rGgkcpFHrTPFv7JrXC9vJ67JwuOYMtLnpoAJf2TWPRQwPy5du+J+/Q7xt27ItDaY0xFZE1W5VSr1zaM+LIvctGDkJVOfQO1/eg96G5tZfKqSncd2anqPbx9PhFnNaxafELa4ypcKzmUUqddEQTWjesEXF5cMj1SPl6pTUocB9zVm8tWuGMMRWeBY9y7NFzuoRNvz9MzSRt+Bh7U6ExJmoWPMqwnx/sz8IH89/byNYmQo1kaK/WeeZDh3Q3xpjCWPAow6pWSqVKpch/wkhvE6ycmjc9mc1Xu/Zl5dR69hXj9b7GmJJlwaMCCg0ql76c+1bEf339S4mWpe/Ir3KmZ6zYXKL7NsYUnQWPcu7WMw7nupPa58x/e9tJ+fJsC3ThfeTzBYyfX3JvINyyK3fI+M/m/lpi+zXGFI911S3nrvWB49qT2vNL5g5aNch7H+T9aavyrTPstQxeuawnlVKE4zs0LpFyArz6/bKouxkbY5LLgkcFUb1KKp1b1M2Xfst7s8Lmv+yVqQC8cHE6e7MOMLhL/AdRjPAqF2NMGWDNVqZAV7yewXVvzUjItvfsz3+D/LGxCxKyL2NMfFnNw5S47xdv4K0pK6hRJf8Q8s9O+IUTD2tCr0MLfsDRGJNcVvOooPp3OiRf2l0DjyyRff/uxcl8Mnst72bkv98CcN6/fyiRchhjis6CRwU1JOR97M/+rgdXnNA27vtZv20PJz42gcXrt9PjgXGc9sQ3YfPVrV45z3xhL74yxiSXNVtVUGd0OoThA45gaM/W1KyaGnEk32wHDiqpKeEfOizIcY9OYF/WQc557ge27NrPpp3hR/Ldunt/nvkBf/824nvijTHJZzWPCiolRbjqxHbUrVG50MABsHj9jiLtJ/up8eDzHKEe+e1RdG9dr0jbN8YkR8KCh4i0EpEJIjJfRH4SkT/79AYiMk5EFvnf9SOsf4nPs0hELklUOU10EtmMtHrzbgYdVfTX7BpjSl4iax5ZwM2qeiRwDHCtiHQEhgPjVbUDMN7P5yEiDYC/AL2BXsBfIgUZUzIqpYZvstqXdZDvFm8Iu+zgweie43j6q8VcduyhXH9y+zzp67fvia2QxpgSk7DgoaprVXW6n94OzAdaAEOA13y214Czwqx+BjBOVTep6mZgHNA/UWU1hWtVP/8IvUsyd3DY3Z9x4YuT+cf4RfmWh97HCHrh4vSc6W6t6pGaItx0+uEcUqdaYPs7i1lqY0yilMg9DxFJA7oDk4GmqroWXIABmoRZpQWwMjC/yqeZJAkdtj1t+BhOfjy359Tj4xby9c/r2bE3d5ysoS/8GHF7px6Z+2c/N71lzvTYG07ImS7oPokxJrkSHjxEpBYwGrhBVaNtOA/XRhK2DURErhSRDBHJyMzMLGoxjRdpiPd9geCxbU/4L/VLX5lK57+MzZlf8Ov2sPlOOaIJIsKUu07h4j5tOC+9Vc6yujUqU7+G67b79tQVMZffGFMyEho8RKQyLnC8qaof+OR1ItLML28GrA+z6iqgVWC+JbAm3D5U9XlVTVfV9MaNS24Qv/LqP8N6h03fm3WQnb5WsWhd+KAQjY+uPZaXLu0JQJPa1RgxpDOVQ3p7dWxeB4DNVvMwptRKZG8rAV4C5qvqE4FFHwPZvacuAf4bZvWxwOkiUt/fKD/dp5kEO8oPnvh/XfM+RHjZK1Pp9JexfDJ7DcV58WC3VoV3yW1QsyoA2wq4Z2KMSa5E1jyOBS4CThaRmf5nIDASOE1EFgGn+XlEJF1EXgRQ1U3AA8BU/zPCp5kEq14llYUPDuDpC7qFXX7dWzPYtHNvgdso7mtt61Z3z65u2RX+gUJjTPIl7AlzVZ1E+HsXAKeEyZ8B/CEw/zLwcmJKZwqSfd/j6aHdeXLcQpZuyNvrad6agm9ddbjrM766+cSc+Ua1qrJhx16u6dcuqv3Xq14FsGYrY0oze8LcRHRm1+b8Y2j3fOlHtXRNT6Ov7suShweGXTfYE2twF/cAYOsG+bv7hlOEUVCMMSUsquAhIjVFJMVPHyYiZ/qb4aacO+KQ2vnSrng9A3CDHqakCHcOPKLAbdx3Zid+vOMULujVOqp9ntczt6/EuxkrySpmM5gxJv6irXlMBKqJSAvcU+GXAa8mqlCm9Cho3Kvs8a4Oa5o/wIQ6pG61QvNkCw7AeNv7s2l/12dRr2uMKRnRBg9R1V3A2cA/VPU3QMfEFcuUBQOOcu8E6Xd4ExrVqhK37TauVTVu2zLGJEbUwUNE+gAXAmN8mg3nXsG1b5Jb48i4+zQ+vKZvvjyv+Gc6YhGuthPpwURjTHJ
EGzxuAO4APlTVn0SkLTAhccUyZVH31vnHrjzpiHCjz8Suy31fsHh90R9ONMbEV1TBQ1W/UdUzgWf8/BJVvT6hJTOlRmE3xIM+uKYvw447NCHleGvySl6atJS04WPsJroxSRZtb6s+IjIPNzIuItJVRP6Z0JKZUuPKE9pF/Va/Hq3rc89gdzssXE+t4nj5u6U88Mk8wA3jboxJnmibrZ7CDZO+EUBVZwEnFLiGKXfGBx78+374yQXm/eCavrz5h/DjZMXD02GGgDfGlJyob3qr6ko3XFWOA/EvjinN2jWuxZQ7T+HrnzNpXq96gXl7hLn/YYwpP6KteawUkb6AikgVEbkF34RlKpYmdarleYgvUTo0qQXA5ccm5v6JMaZ4oq15XAX8HfdCplXAF8C1iSqUMWNvOIHd+w/w67Y9vPzd0mQXxxgTIpaHBC9U1aaq2kRVf6+qGxNaMlOhpaQINbMweHQAACAASURBVKtWom2jmlx7UnQDKhpjSk60weN7EflCRIaJSOEvZDAmTkSEW8+IvquwMaZkRPucRwfgbqATMF1EPhGR3ye0ZMYYY0qtqIdkV9UpqnoT0AvYBLyWsFIZE+K89JYRl81etQXVsK+4N8YkSLQPCdYRkUtE5DPge2AtLogYUyKWZO4Mm37HB3M485nv+M+Py0u4RMZUbNHWPGYB3XCvgz1MVW9X1WkJLJcxeWQs35wvTVUZNWUFYE+cG1PSou2q21ZVVURqi0gtVd2R0FIZE4XZq7bmTB88aM1WxpSkaGsenURkBjAXmCci00SkcwLLZUyhhjz7Xc70xp37klgSYyqeaIPH88BNqtpGVVsDN/u0iETkZRFZLyJzA2nviMhM/7NMRGZGWHeZiMzx+TKiPRhTMZzZtXmyi2BMhRdt8Kipqjnv71DVr4GahazzKtA/mKCq56tqN1XtBowGPihg/ZN83vQoy2jKsRFDOgFw96AjadWg4HG1jDGJF+09jyUicg/whp//PVDgmBGqOlFE0sItEzfC4nlAwUOzGuNddEwb2jaqxbHtG5L+4JfJLo4xFV60NY/Lgca4msKHfvqyYuz3eGCdqkYaV1uBL/y9lSsL2pCIXCkiGSKSkZmZWYwimdJMRDiuQyNEhLo1Kie7OMZUeNE+Yb5ZVa9X1R6q2l1V/6yq+ftORm8oMKqA5ceqag9gAHCtiER8d4iqPq+q6aqa3rhx42IUyZQVL1xsLZnGJFuBzVYi8nFBy/2raWMiIpWAs4GjC9juGv97vYh8iHsgcWKs+zLlU7vGtcKmHzyopKRI2GXGmPgq7J5HH2AlrpYwGYjHf+apwAJVXRVuoYjUBFJUdbufPh0YEYf9mnLuoCopcTlFjTGFKazZ6hDgTqAz7n0epwEbVPUbVf2moBVFZBTwA3C4iKwSkWF+0QWENFmJSHMR+dTPNgUmicgsYAowRlU/j+WgTPn34TV986U9Pm5hEkpiTMUk0Q4oJyJVcfcqHsMNU/KPRBasKNLT0zUjwx4LqQhUlUPv+DRfeqNaVcm4+9QklMiYsklEphXlkYhCu+r6oDEIFzjSgKcp+PkMYxJORHj+oqNpXq863yzM5LGxPwOwYcfeJJfMmIqhsBvmr+GarD4D7lfVuQXlN6Yknd7pEABemmSvqTWmpBV2z+Mi4DDgz7i3CW7zP9tFZFvii2dM4S7o2SrZRTCmwikweKhqiqrW9j91Aj+1VbVOSRXSmIL0btuQVy/rmexiGFOhRP0mQWNKs36HN6FH63p50m58ZybvTF2RpBIZU75Z8DDlxvQVWwBIGz6G5Rt38uGM1dw+ek6SS2VM+WTBw5RLF700JdlFMKZcs+Bhyo2zuuW+52PFpl1JLIkx5Z8FD1NudGhaO2z6jr1Z3PjOTDbb2waNiZto3+dhTKk3bt66sOmd/zIWgA9nrGbZyEElWSRjyi2reZhy4wz/0KAxJvEseJhy47vFG5JdBGMqDAseptxo1aBGsotgTIVhwcOUG3cPOjLZRTCmwrDgYcqNmlUrcWnftIjLxd4TZUzcWPAw5cp9Z3aiTcPwzVct61cv4dIYU35Z8DDlzqgrjgmbHuV7z4wxUbDnPEy507xedT6+7ljaN6nF6Omr6dayHn8aNZ29WQeTXTRjyg2reZhyqUvLetSoUomLjmnDUS3r0qddI/YfsKqHMfGSsOAhIi+LyHoRmRtIu09EVovITP8zMMK6/UXkZxFZLCLDE1VGU3FUSRWyDlrNw5h4SWTN41Wgf5j0J1W1m//5NHShiKQCzwIDgI7AUBHpmMBymgpiy679yS6CMeVGwoKHqk4ENhVh1V7AYlVdoqr7gLeBIXEtnKlwXvthOQBqd82NiYtk3PO4TkRm+2at+mGWtwBWBuZX+bSwRORKEckQkYzMzMx4l9WUM3uzDrJ+255kF8OYMq+kg8e/gHZAN2At8HiYPOEe5Yp4uaiqz6tquqqmN27cOD6lNOXOIXWqAXDzu7Po9fB4PpuzNs9yVeXAQauVGBOtEg0eqrpOVQ+o6kHgBVwTVahVQKvAfEtgTUmUz5RfpxzZBIAxPmhc/eZ0APYfOMjOvVnc+eFc2t35KfsP2E11Y6JRos95iEgzVc2+5PsNMDdMtqlABxE5FFgNXAD8roSKaMqpLbvD3yzv+dCXeW6kb9+TRYOaVQA4/tGvOL3jIdwz2PprGBMqkV11RwE/AIeLyCoRGQY8KiJzRGQ2cBJwo8/bXEQ+BVDVLOA6YCwwH3hXVX9KVDlNxXDu0S3Dpof2wDoYuKG+ctNuXpq0NKHlMqasSljNQ1WHhkl+KULeNcDAwPynQL5uvMYU1fQVW6LKN/T5H/nixhPypGUdOEilVHue1pgg+48wFULXlnWjyrdo/Q4+nfMrk5fm9jJfsmFnooplTJllwcNUCPVqVMmXdueHc8LmXbdtDz8u2ZgzP3/ttoSVy5iyygZGNBXC0W3yP1L01uQVYfNOXrqRjGWbc+btDYXG5Gc1D2NCVEpNYePOfTnzB+35D2PyseBhTIgxs/M+QGhDuRuTnwUPYwqxesvuZBfBmFLHgoepMN4YFm5Ag1xDe7UOm37b+7MTURxjyjQLHqbCyH5y/IrjDw27/PgOjUqyOMaUaRY8TIXRqXldRl/dl9v7HxF2ef0w3XmNMeFZ8DAVytFt6ud7Wvz0jk059cgmHNO2QcT1xs9fx/WjZiS6eMaUGRY8TIXX7/AmvHhJT0TCvQ3AGfZaBh/PWsPerAMlWDJjSi8LHqbCO7tH+HeNNaldNV/apsDzH8ZUZPaEuanwqlVOzZle8vBA9mYdpEqlFFSV9nd9lifvBc//yDe3nlTSRTSm1LGah6mQ/jOsNwBf3nRinvSUFKF6lVRSU4RKqSk0Dql9LN+4q8j7XPDrNv79zS9FXt+Y0sRqHqZCOq5DI5aNHFRovszte/Ol7d53gOpVUsPkLlj/p74F4OI+aUVa35jSxGoexsToyHs/Z+7qrYXmu+vDOTwxbmG+9E277L6JKfsseBhTBIP/ManQPG9OXsHT4xflS/92YWYiimRMibLgYUyU+h3eOM982v
AxpA0fE3bU3Qc/mRdxO7v3W3dfU/ZZ8DAmSlee0DZs+reLNwC5wWTLrn28GHj3eWhw+WrB+sQV0pgSkrDgISIvi8h6EZkbSHtMRBaIyGwR+VBE6kVYd5mIzBGRmSKSkagyGlOYR8/pAsArl/akb7vwY1/t3JvF4vU7cua7jRiXZ3nmjrw33b9dtCHOpTSm5CWy5vEq0D8kbRzQWVW7AAuBOwpY/yRV7aaq6QkqnzGFOi+9FdPuPpWTjmgSMc81b07n1Ce+ibi898PjE1E0Y5IqYcFDVScCm0LSvlDVLD/7I9AyUfs3Jl4a1sp91mPhgwOKvb3QeyfGlEXJvOdxOfBZhGUKfCEi00TkyoI2IiJXikiGiGRkZlovFpNYVSqlcFjTWjGvN2vllpzp0zo2jWeRjEmKpAQPEbkLyALejJDlWFXtAQwArhWREyJtS1WfV9V0VU1v3Niu6Eziff7niKdjREOe/S5netryzfEsjjFJUeLBQ0QuAQYDF6pq/j6OgKqu8b/XAx8CBb8CzpgSlJISefTdaHwwfTURTn1jyowSDR4i0h+4HThTVcMOEiQiNUWkdvY0cDowN1xeY8qqbbuzCs9kTCmWyK66o4AfgMNFZJWIDAOeAWoD43w33Od83uYi8qlftSkwSURmAVOAMar6eaLKaUxRXNKnDQD3n9kpYp52jWtGXLZys7t22uC78e7Zf4C04WM44dEJcSylMYkj5an6nJ6erhkZ9liIKTmPjV3AsxOKN1LuvYM7MiLwRHo0AzbGIuvAQVJEit3cZsonEZlWlEci7AlzY4rhnKNbFXsbIwoYyiQe2t/1GT0eHFd4RhNXo6et4o9vJO5iduWmXcxYkbzOFxY8jCmGVvWr55k/Lz3vo0v1a1SOeZu3vT+LtOFjyDpwsMB8a7bsZsgzk1izZXfEPNktC1t27Y+5HKZ4bn5vFmN/Wpew7R//6AR+88/vGT56Nt1GfJGw/URiwcOYYqiUmsKykYOYctcpzLjnNB49p2vOsm9vO4kLerXOk79jszqFbvPdjFUAbNntvvC//2UDacPH5NwfyXbLe7OYtWorb05eHnFbqzZHDiyJkDZ8DP2fmlii+ywpr363lI9nrYl5vXADZ8bT21NXJuXiwIKHMXHQpHY16tesAsBVJ7YDoGmdatxy+uE5eZaNHMSnfz4+6m2mP/glacPH8LsXJueZX7lpF2u37ub7XzYC8NbkFQB8OGMVZ//zO05/8htWb9nN6i27efLL/O8TyZa5fS/fL47/OFsLft0e920my469WaQNH8MLE5dw3//mcf2oGTFvY9aqLVz71vQElC657E2CxsTZ7f0P56bTDqNKpdxrs9Pj+FT58SE9sjbv2s/Jf/uaJRt25qQdO/KrQrfT86EvgfjdoN+9r/wNNb8k0w14+dCn84u8jd/883sA7hiwi5b1a8SlXHNWFf4yskSz4GFMnIkIVSrl9myKd++pcIKBI1Zbdu2jXo0qxdr/M18t4qXAMPThqCoL1+3g8ENqF2tfJalu9djvWUWyYlP8gsdjX/wcl+0UhzVbGVPCvr6lH+9d1SdP2uuX9+Lpod0Tut9Ibe/xaFL52xcL2VxIu/t701ZxxlMT+frnsvM+k18ydxSeKUrz1myL27bSGsYnCBWHBQ9jSlhao5r0TGuQp0bSrXU9Wob03Iq3SF+E3y3emC9tz/4DHAgJNtv27Gfykvx5o/Htokxue382AF/OL14PpEXrtrOnhN7G+PzEJXHb1uY4vru+96EN47atorLgYUwpUKdaZXq0rs+ZXZsnbB/3/y/650mOuOfzfM8oXPl6Buc//yO79uUOraKqUd3ruOilKTnTM1ZsKSBnwbbu3s9pT07kiHtKZtCJWlXzN1tt31O0nk3Hd4jfwK2htcWaVVLjtu1oWfAwJok++dNxPPrbLjnz15/SPmH7mhRjz6ov5+dtXvpxiXs9z449Lnhs2rmPQ+/4lCPvzf9Fft5zP+RM/2P8ojzLgu9HidWOvSU7Jti2IgaKcJ6dsDhu2wr104jQ9+4lngUPY5Koc4u6nNcz9yn19k1qs2zkIGpXrUSzutXy5J1296nF3l9hDx4CfDpnbc50uC/r/b45a9SUFRG3MWXZJtKGj+Hp8Yt4fFze7sITF8b+3p0tu/axZstuFpZwN+Btu/MHj9DmvGglslaZDBY8jCmF5tx/Bj/ccUrO/IIH+ue5Yn/u9z145dKeMW/3T/45hZWb8g5qnbk99wHEa97MbRLp/JexPPjJvDxDyE9YsJ43flzOY2ML7/HzxLjwz5lkPxW/ctMu0oaPKbTpq9uIcfQd+VXC7wuFChc8sooYPGpUiX/n1r9f0I0pd55SeMYEsOBhTBlQrXLeNu3+nZsV+F51gPQ29fOlfTb3V9KGj8n3rEjfke496+EGSn1x0lIOvePTnPm7P5rLPR8V7y0JfUd+xb6sgznlCNf0lW3zztwbzfNDah43vD2DrxYkbgiQbXvy17yKWvO49q3p/Lp1T3GLlMeAzs1oUqda4RkTwIKHMaXYgM6H0LpBbrfMZSMH5eml9cMdJ/P1Lf1y5r+5NXd62HGHAtCqQeFX6/sPKGnDx9Dx3rHFL3SUDrs7/1uox89fl1Mrmr5iM2nDx7A1cPUffML7sbEL+GjmGi5/Ne+N/QW/bmP+2vh0iw3XbNf74fHc9O7MIm1v2caiP48TNLhLM4A8D6KWNAsexpRi//r90Uy87aSIy5vVrU5ao9z3hrRp6KbP6tY8Z5iQlZuiH99qdwl1gQ27730HGPZaRk5t5Gz/ZHa/v30dNn/oUPh79h/gjR+X0/+pbxnw92+LVZYnxy3ME7RCfTB9db607HeyHH73Z6gqEmYE/INRvgJj+cadPPDJPLZGeHbmk9lrw6aXJHvC3Jhy4I1hvfjJP4SWXTP5LtC76tj2DcM+z1FS2jepxeL1BT9wF2y6uuyVKQXkzG/t1t30+WveIVnezVjJbe/PplbVSnx83bG0bVwrz/J/fr2YRz93920WPjiASYszOfmIpoybt46/j1/E38cvomqlFPZmFd7JAODbRe7z3pt1kAMHlXBx4vcvTuarm/uRsXwzvQ9tQKsG4R/2O/GxrwF4adJSlv51INktZSniRjAoDexlUMaUU5t37qP7A+O4/pQODO3VirP/+T1r49zmHtSmYQ26tKzHP4Z2p99jE1i2Mfem/JKHBzJ+wXqueD15/5/P/f5o+nc+hB+XbOTaN6ezcWf+h/ae+V13rnsrusEPl40cxLY9+7nno7n8d2be0XbvHnQkD44pfDysf17Yg/0HDjKkW4s86WnDx+RMt6hXndW+g0Gftg35IfCgZjyGvinqy6AseBhTgbw7dSW3jZ6dMz/lrlPo9dD4Im/vm1v75VwlL3l4YM7bCmev2sKZz3yXky/7S67vX8ezJoEBrDDLRg7K88Vc3G3d/v5s3slYWextjbvxBC5/bSr1a1Sha8t6vPFj5GH2Q8tQXPYmQWNMoYLPlPxjaHea1C5eT52mgZ4+wdfcdmlZj1FXHAPAo+fkPgT5zh/7cHyHRsXaZ
3HEK3AA7M06EJfAAXDakxNZuWk3s1dtjTpwDOh8SFz2XVQJDR4i8rKIrBeRuYG0BiIyTkQW+d/5+xO6fJf4PItE5JJEltOYiihSe3skFx3ThmUjB/Hfa4+lZ1p97hnckWqVU/nm1n45gSKoT7uGZNx9Kuel5wasVg1q8Maw3sUue2lw+N0lM0RKJFWT2NMKEl/zeBUIfW5+ODBeVTsA4/18HiLSAPgL0BvoBfwlUpAxxhTNEX5o9H6H5x9zqVZV15fmqfO75aQd1aIuAF1b1eO9q/rmdAVu07AmfdqFH6ivUSFDkSwbOYihIW9bjKR65ZIfvymoTrXY+xf1aF0vASVx5sWpO3JRJTR4qOpEYFNI8hDgNT/9GnBWmFXPAMap6iZV3QyMI38QMsYUwauX9eSiY9rkPHj44sX5m7sv8M1bwW7AdYvwPvZIRgzpxP+uOw6Ac45uGTbPY4HmLojcjbikmm8a1a7K7PtO5+5BR0a9TnE6KFx7UrsCly9cF7/h4osiGfWepqq6FsD/DveYbAsg2Ji4yqflIyJXikiGiGRkZsY+Zo4xFU2/w5vwwFmdc+Yrpeb/GrjihLZ8edOJdGuVe+V8ciFPtMfi4j5pHNXS1WSODjwJH7wB3LVVPU4J7LN9k7xdbbP96/dHc91JBQ8oecJh4Ue0nXJX5KE9ggNWAgw+qhl1qlXmmRgGOGxRzz2g2bBmlXwPeBbm1jOOiDpvMpTWG+bhOjKH7Ramqs+rarqqpjduHL8hj42pSP58Sgf+eELbnKao2tUq5XxZP/Lbo/hd79ZUDhNkEumwprX590VH06ZhDabfcxrD++d+mS4bOYjFDw3gl4cHAnDLGYez5OGBDO3VKuy2Xr+8V9j07A4DlVPdV851J7WnWuUU3r+qD+f1bMUrl+WOH5Z9/H0jNNGFs8g/2xKuW3BBfrr/jJjyJ0MyHhJcJyLNVHWtiDQDwr1WbBXQLzDfEvi6BMpmTIV042mHAW7cputP7pBnEL/ze7bm/NjHYIxJ+ya1qO+bxRY/NID9B9y1YqXUFL651T1hf1yHRvRKa8Ddg4/MWRaUkiL89ewu/PXsLsxdvZVZq7Zw14e5Y3A9cFbnnDG55t5/BpV877AvbzqRpnWqUjk1haqVUrjljMNz1jnp8Nyaz29981q7kIcNf3l4IFt27ePKN6YxbfnmPMsGHnUIo6bk7ZE1897T6DZiXJ600zq6hxOz1axa+FfzH3ygT5Zk1Dw+BrJ7T10C/DdMnrHA6SJS398oP92nGWMSKDVF4npvI1pf3nQi713VF3BBoXqYlxtVq5zKu1f1oUvLwm9Cd25Rlwt7t8mT1qJebrfiWlUr5dzzad+kFrWrVaZa5dSwT2+/feUxDDqqWc4Q+b/pntuCPmJIJ1JThIa1qjL66r4sfmgA/wi8Tjj7jX9/vyC340G9GlX44sYT8uzj+YuO5sNr+ubbd5VKKaSmhH+i/O7BHcMffAlJaM1DREbhahCNRGQVrgfVSOBdERkGrADO9XnTgatU9Q+quklEHgCm+k2NUNXQG+/GGFOg609uT1P/pV+3etGC4jFtG3JM29ymqraNa/HSJekcUrcanZrXzZO3UmoK/9e1OW0b1+Smd2ZxVvcWDOnWPF9QOqxp7TzzIkL31u7ez6GBTgoLHxwA5D6fsmzkIPo/NTHnifNksifMjTEVQvZwLRCfJ7OL644PZuc0aRVWnlvfm8V3izfw/R2n5AybH68xror6hLkNjGiMqRDq16zCaR2b8rsonytJtIfOOopvF20I+4BlqMfO7ZozbQMjJoDVPIwxJjY2tpUxxpgSY8HDGGNMzCx4GGOMiZkFD2OMMTGz4GGMMSZmFjyMMcbEzIKHMcaYmFnwMMYYE7Ny9ZCgiGQC0b0AOFcjYEMCilOaVcRjhop53HbMFUNxjrmNqsb8PotyFTyKQkQyivJ0ZVlWEY8ZKuZx2zFXDMk4Zmu2MsYYEzMLHsYYY2JmwQOeT3YBkqAiHjNUzOO2Y64YSvyYK/w9D2OMMbGzmocxxpiYWfAwxhgTswodPESkv4j8LCKLRWR4sssTiYi8LCLrRWRuIK2BiIwTkUX+d32fLiLytD+m2SLSI7DOJT7/IhG5JJB+tIjM8es8Lf5VZUXZR5yOt5WITBCR+SLyk4j8ubwfs99HNRGZIiKz/HHf79MPFZHJvkzviEgVn17Vzy/2y9MC27rDp/8sImcE0sOe80XZR5yPPVVEZojIJxXhmEVkmT//ZopIhk8rW+e3qlbIHyAV+AVoC1QBZgEdk12uCGU9AegBzA2kPQoM99PDgUf89EDgM0CAY4DJPr0BsMT/ru+n6/tlU4A+fp3PgAFF2Uccj7cZ0MNP1wYWAh3L8zH7fQhQy09XBib7fb0LXODTnwOu9tPXAM/56QuAd/x0R38+VwUO9ed5akHnfKz7SMCx3wS8BXxSlPKUtWMGlgGNQtLK1Pmd9C/GZP34D3ZsYP4O4I5kl6uA8qaRN3j8DDTz082An/30v4GhofmAocC/A+n/9mnNgAWB9Jx8se4jgcf+X+C0CnbMNYDpQG/ck8OVQs9bYCzQx09X8vkk9FzOzhfpnPfrxLSPOB9rS2A8cDLwSVHKUwaPeRn5g0eZOr8rcrNVC2BlYH6VTysrmqrqWgD/u4lPj3RcBaWvCpNelH3EnW8y6I67Ci/3x+ybb2YC64FxuKvmLaqaFWa/OWXyy7cCDQsoa6T0hkXYRzw9BdwGHPTzRSlPWTtmBb4QkWkicqVPK1Pnd6VCDrA8kzBp5aHfcqTjijW9KPuIKxGpBYwGblDVbb7ZNpbylLljVtUDQDcRqQd8CBxZwH5jPb5wF4uFfR4JPW4RGQysV9VpItIvin2W+WP2jlXVNSLSBBgnIgsKyFsqz++KXPNYBbQKzLcE1iSpLEWxTkSaAfjf6316pOMqKL1lmPSi7CNuRKQyLnC8qaofFLE8ZeqYg1R1C/A1rv25nohkX+gF95tTJr+8LrCpgLJGSt9QhH3Ey7HAmSKyDHgb13T1VBHKU5aOGVVd43+vx10k9KKMnd8VOXhMBTr4HhdVcDfGPk5ymWLxMZDdu+IS3H2B7PSLfe+JY4Ctvno6FjhdROr7Hhan49p41wLbReQY3yPj4pBtxbKPuPDleAmYr6pPVIRj9sfd2Nc4EJHqwKnAfGACcE6EMmWX9RzgK3UN1h8DF/heQ4cCHXA3UMOe836dWPcRF6p6h6q2VNU0X56vVPXC8nzMIlJTRGpnT+POy7mUtfM7njeBytoProfBQly78l3JLk8B5RwFrAX2464QhuHaYMcDi/zvBj6vAM/6Y5oDpAe2czmw2P9cFkhP9yfvL8Az5I48EPM+4nS8x+GqzLOBmf5nYHk+Zr+PLsAMf9xzgXt9elvcF+Fi4D2gqk+v5ucX++VtA9u6y5f1Z3xPm4LO+aLsIwHH34/c3lbl9pj9fmf5n5+yy1TWzm8bnsQYY0zMKnKzlTHGmCKy4GGMMSZmFjyMMcbEzIKHMcaYmFnwMMYYEzMLHqbUEREVkccD87eIyH1x2varInJO
4TmLvZ9zxY0KPCGQdpS4UVRnisgmEVnqp78UkeYi8n4Cy9NNRAYmavum4rHgYUqjvcDZItIo2QUJEpHUGLIPA65R1ZOyE1R1jqp2U9VuuIeybvXzp6rqGlVNZFDrhnvewZi4sOBhSqMs3DuZbwxdEFpzEJEd/nc/EflGRN4VkYUiMlJELhT3fow5ItIusJlTReRbn2+wXz9VRB4Tkani3mfwx8B2J4jIW7iHp0LLM9Rvf66IPOLT7sU96PiciDwWzQGLSJr497WIyKUi8pGI/M/XTq4TkZvEve/iRxFp4PO1E5HPxQ2u962IHOHTz/XlmSUiE/2T1SOA831N53z/lPPL/nhniMiQwL7/67f7s4j8xafXFJExfptzReT8aI7LlF8VeWBEU7o9C8wWkUdjWKcrbiDBTbh3G7yoqr3EvUzqT8ANPl8acCLQDpggIu1xQzhsVdWeIlIV+E5EvvD5ewGdVXVpcGci0hx4BDga2IwbJfUsVR0hIicDt6hqRsxH7nTGjSZcDff08O2q2l1EnvRlfQoXYK9S1UUi0hv4J25sqHuBM1R1tYjUU9V9PqClq+p1vuwP44bduFzckChTROTL4PECu4CpIjIGaAOsUdVBfv26RTwuU05YzcOUSqq6DXgduD6G1aaq6lpV3YsbZiH7y38OLmBke1dVD6rqIlyQOQI3LtDF4oZDn4wbxqGDzz8lNHB4PYGvVTVT3dDdb+Je3BUPE1R1u6pmrPYe+QAAAfhJREFU4oYE/1/wWMSNONwXeM+X+d+49zMAfAe8KiJX4F6GFM7pwHC/7te4INXaLxunqhtVdTfwAa4WNQdXY3tERI5X1a1xOk5TRlnNw5RmT+FeiPRKIC0Lf9HjB32rEli2NzB9MDB/kLzneuiYPNnDWP9JVccGF4gbJnxnhPJFHCM+Dgo7lhTc+yi6ha6oqlf5msggYKaI5MuDK/tvVfXnPIluvXyfj6ouFJGjcfdN/ioiX6jqiKIcmCkfrOZhSi1V3YR7VeiwQPIyXDMRwBDc61pjda6IpPj7IG1xA+mNBa4WNxQ8InKYuBFPCzIZOFFEGvmb6UOBb4pQnpj5mtlSETkXct5B3dVPt1PVyap6L27o8VbAdtwrfbONBf7kAzAi0j2w7DRx77quDpyFa8JrDuxS1f8Af8O9FtlUYBY8TGn3OBDsdfUC7gt7Cu4VrZFqBQX5Gfcl/xnunsEe4EVgHjDd37j+N4XUzNUNWX0HbmjvWcB0Vf1vQevE2YXAMBHJHp11iE9/LPsmPjDRl20C0DH7hjnwAC7wzvb5HghsdxLwBm4049H+vs1RuPsiM3Gj1z6Y+MMzpZmNqmuMySEilxK4sW5MJFbzMMYYEzOreRhjjImZ1TyMMcbEzIKHMcaYmFnwMMYYEzMLHsYYY2JmwcMYY0zM/h9rM82HWOHJYwAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"plot_results(log_dir,1000)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# keep learning using a previously trained model\n",
"#model = A2C.load('./gym/best_model_cruiser_7x7.pkl')\n",
"\n",
"# ships -- keep only one kind for 5x5 grid\n",
"#ships = {}\n",
"#ships['cruiser'] = 3\n",
"\n",
"#grid_size = 7\n",
"#num_timesteps = 10000000 # this is number of moves and not number of episodes\n",
"\n",
"#best_mean_reward, n_steps, step_interval, episode_interval = -np.inf, 0, 10000, 1000\n",
"\n",
"# Instantiate the env\n",
"#env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size, ships=ships)\n",
"\n",
"# wrap it\n",
"#log_dir = \"./gym/\"\n",
"#os.makedirs(log_dir, exist_ok=True)\n",
"#env = Monitor(env, filename=log_dir, allow_early_resets=True)\n",
"#env = DummyVecEnv([lambda: env])\n",
"\n",
"#model.set_env(env)\n",
"\n",
"#model.learn(total_timesteps=num_timesteps, callback=callback)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Visualizing How the Agent Plays"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#model_best = A2C.load('./gym/best_model_cruiser_5x5.pkl')\n",
"#model_best = A2C.load('./gym/best_model_cruiser_6x6.pkl')\n",
"model_best = A2C.load('./gym/best_model_cruiser_7x7.pkl')\n",
"#model_best = A2C.load('./gym/best_model_cruiser_10x10.pkl')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"# brew install ffmpeg\n",
"# brew install gifsicle\n",
"# Shift + Command + 5 for recording. This saves .mov file\n",
"# right-click on mov file, get info for video size to use here below\n",
"# ffmpeg -i in.mov -s 448x790 -pix_fmt rgb24 -r 10 -f gif - | gifsicle --optimize=3 --delay=3 > out.gif\n",
"\n",
"from IPython.display import clear_output\n",
"import time\n",
"\n",
"ships = {}\n",
"ships['cruiser'] = 3\n",
"\n",
"grid_size=7\n",
"enemy_board = 0*np.ones((grid_size, grid_size), dtype='int')\n",
"#enemy_board[3,5] = 1\n",
"#enemy_board[4,5] = 1\n",
"#enemy_board[5,5] = 1\n",
"env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size, ships=ships)\n",
"# give me time to setup recording\n",
"time.sleep(5)\n",
"for ep in range(10):\n",
" obs = env.reset()\n",
" ## 2 empty boards\n",
" done = False\n",
" nmoves = 0\n",
" print('episode no.', ep, '# moves:', nmoves)\n",
" env.render()\n",
" env.render()\n",
" time.sleep(5)\n",
" clear_output(wait=True) \n",
" while not done:\n",
" action, obs = model_best.predict(obs, deterministic=True)\n",
" obs, _, done , _ = env.step(action)\n",
" nmoves += 1\n",
" print('episode no.', ep, '# moves:', nmoves)\n",
" env.render()\n",
" board_rendering(grid_size, env.enemy_board)\n",
" time.sleep(np.random.uniform(1,3))\n",
" clear_output(wait=True) \n",
" "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Optimizing The Algorithm Parameters with Hyperopt"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"## To optimize a RL model, see https://github.com/araffin/rl-baselines-zoo/tree/master/hyperparams or\n",
"## in general https://github.com/araffin/rl-baselines-zoo. This package uses optuna optimization\n",
"## but it works for the trained agents there. You can modify this package to include your case\n",
"## or just use the yml file to see what parameters to tune\n",
"\n",
"from hyperopt import hp, fmin, tpe, STATUS_OK, Trials, space_eval\n",
"from stable_baselines.common.vec_env import DummyVecEnv\n",
"from stable_baselines import DQN, PPO2, A2C, ACKTR\n",
"from stable_baselines.bench import Monitor\n",
"\n",
"# Agent hyperparameter optimization\n",
"def objective(space):\n",
" \n",
"\n",
" env_copies = space['env_copies'] \n",
" num_timesteps = space['num_timesteps']\n",
" gamma = space['gamma']\n",
" n_steps = space['n_steps']\n",
" vf_coef = space['vf_coef']\n",
" ent_coef = space['ent_coef']\n",
" max_grad_norm = space['max_grad_norm']\n",
" learning_rate = space['learning_rate']\n",
" alpha = space['alpha']\n",
" epsilon = space['epsilon']\n",
" lr_schedule = space['lr_schedule']\n",
" \n",
" print('space:', space)\n",
" \n",
" # ships\n",
" ships = {}\n",
" ships['cruiser'] = 3\n",
"\n",
" grid_size = 7\n",
"\n",
" # Instantiate the env\n",
" env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=grid_size, ships=ships)\n",
"\n",
" env = DummyVecEnv([lambda: env]*env_copies)\n",
" \n",
" model = A2C('MlpPolicy', env, verbose=0, \n",
" gamma=gamma,\n",
" n_steps=n_steps,\n",
" ent_coef=ent_coef,\n",
" learning_rate=learning_rate,\n",
" vf_coef=vf_coef,\n",
" max_grad_norm=max_grad_norm,\n",
" alpha=alpha,\n",
" epsilon=epsilon,\n",
" lr_schedule=lr_schedule\n",
" ).learn(total_timesteps=num_timesteps)\n",
" \n",
" rewards_mean = []\n",
" moves_mean = []\n",
" n_episodes = 100\n",
" for ep in range(n_episodes):\n",
" reward_env = []\n",
" moves_env = []\n",
" for env_i in env.envs:\n",
" obs = env_i.reset()\n",
" done = False\n",
" rewards_sum = 0\n",
" moves = 0\n",
" while not done:\n",
" action, obs = model.predict(obs, deterministic=True)\n",
" obs, reward, done , _ = env_i.step(action)\n",
" rewards_sum += reward # total reward for this episode\n",
" moves += 1\n",
" reward_env.append(rewards_sum)\n",
" moves_env.append(moves)\n",
" rewards_mean.append(np.min(reward_env)) # avg environment reward \n",
" moves_mean.append(np.mean(moves_env)) # avg environment reward \n",
" rewards_mean = np.mean(rewards_mean)\n",
" moves_mean = np.mean(moves_mean)\n",
"\n",
" print('reward', rewards_mean, 'moves', moves_mean)\n",
" \n",
" # hyperopt will minimize objective, number of moves in this case\n",
" return{'loss': moves_mean, 'status': STATUS_OK }"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"space = {\n",
" 'env_copies': hp.choice('env_copies', [10]),\n",
" 'num_timesteps': hp.choice('num_timesteps', [1000000]), #np.arange(1000000, 1000001, 1000000, dtype=int)\n",
" 'gamma': hp.choice('gamma', [0.99, 0.95, 0.9]),\n",
" 'n_steps': hp.choice('n_steps', [5, 1, 10]),\n",
" 'vf_coef': hp.choice('vf_coef', [0.25, 0.1, 0.5]),\n",
" 'ent_coef': hp.choice('ent_coef', [0.01, 0.1]), \n",
" 'learning_rate': hp.choice('learning_rate', [0.0007]),\n",
" 'max_grad_norm': hp.choice('max_grad_norm', [0.5, 0.2, 0.7]), \n",
" 'alpha': hp.choice('lam', [0.99, 0.95, 0.9]), \n",
" 'epsilon': hp.choice('epsilon', [1e-5, 1e-3, 1e-4]), \n",
" 'lr_schedule': hp.choice('lr_schedule', ['constant', 'linear'])\n",
"}\n",
"\n",
"\n",
"trials = Trials()\n",
"best = fmin(fn=objective,\n",
" space=space,\n",
" algo=tpe.suggest,\n",
" max_evals=30, \n",
" trials=trials, verbose=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"param_dist = space_eval(space, best)\n",
"param_dist"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Links\n",
"\n",
"https://colab.research.google.com/github/araffin/rl-tutorial-jnrr19/blob/master/5_custom_gym_env.ipynb#scrollTo=rYzDXA9vJfz1\n",
"\n",
"https://stable-baselines.readthedocs.io/en/master/guide/examples.html\n",
"\n",
"https://gym.openai.com/envs/#classic_control"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reward scheme\n",
"\n",
"For any action: \n",
"$$r=-1,$$ \n",
"but if an action is illegal (moving to a non-empty cell), a random action is drawn from the action space. \n",
"\n",
"This action is penalized assigning:\n",
"\n",
"$$r=-2*S.$$\n",
"\n",
"where $S$ is the grid side length.\n",
"\n",
"If an action results into a hit:\n",
"$$\n",
"r = S.\n",
"$$\n",
"If all ship cells are hit (game is completed)\n",
"$$R = S*S.$$"
]
},
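{
"cell_type": "markdown",
"metadata": {},
"source": [
"The scheme boils down to a few lines of code. The sketch below is illustrative, assuming boolean flags for the outcome of a move; the function name and arguments are not the environment's internal helpers:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Standalone sketch of the reward scheme above; names are illustrative.\n",
"def compute_reward(hit, illegal, game_over, grid_size):\n",
"    S = grid_size\n",
"    if game_over: # all ship cells hit\n",
"        return S * S\n",
"    if hit:       # a ship cell was uncovered\n",
"        return S\n",
"    if illegal:   # non-empty cell chosen; a random action was substituted\n",
"        return -2 * S\n",
"    return -1     # base cost of a move\n",
"\n",
"# on a 7x7 board: a hit earns 7, completing the game earns 49\n",
"assert compute_reward(hit=True, illegal=False, game_over=False, grid_size=7) == 7"
]
},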
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Skeleton Battleship Environmnt"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"class BattleshipEnv(gym.Env):\n",
" \n",
" \"\"\"Custom Environment that follows gym interface\"\"\"\n",
" \"\"\"see https://github.com/openai/gym/blob/master/gym/core.py\"\"\"\n",
" \n",
" metadata = {'render.modes': ['human']} \n",
"\n",
" def __init__(self, enemy_board, ship_locs, grid_size, ships):\n",
" \n",
" super(BattleshipEnv, self).__init__()\n",
" \n",
" # Define action and observation space\n",
" # They must be gym.spaces objects\n",
" # In our case the action space is discrete: index of action\n",
" self.action_space = spaces.Discrete(self.grid_size * self.grid_size)\n",
" # The observation will be the state or configuration of the board\n",
" self.observation_space = spaces.Box(low=-1, high=1,shape=(self.grid_size, self.grid_size), \n",
" dtype=np.int)\n",
" \n",
" pass\n",
" \n",
" # an action will be an index of action_space either from epsilon-greedy\n",
" # or from model prediction\n",
" def step(self, action):\n",
" \n",
" \"\"\"\n",
" Rewards for action and sets next state\n",
" Also, checks if game is completed (done)\n",
" :return: next_state, reward, done, info\n",
" \"\"\"\n",
" \n",
" pass\n",
" \n",
" def reset(self):\n",
" \"\"\"\n",
" Resets the state of the environment to an initial state\n",
" :return: (np.array) state\n",
" \"\"\"\n",
" \n",
" pass\n",
" \n",
" def render(self, mode='human'):\n",
" \"\"\"\n",
" Human readable state. In this case the scoring board\n",
" \"\"\"\n",
" \n",
" pass"
]
}
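,
{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check of the skeleton: only `__init__` does anything yet, but the spaces can already be inspected. The arguments mirror how the env is instantiated earlier in the notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Instantiate the skeleton and inspect its action/observation spaces\n",
"env = BattleshipEnv(enemy_board=None, ship_locs={}, grid_size=5, ships={'cruiser': 3})\n",
"print(env.action_space)        # Discrete(25)\n",
"print(env.observation_space)   # a 5x5 Box with values in [-1, 1]\n",
"print(env.action_space.sample())"
]
}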
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
},
"toc": {
"base_numbering": 1,
"nav_menu": {},
"number_sections": true,
"sideBar": true,
"skip_h1_title": false,
"title_cell": "Table of Contents",
"title_sidebar": "Contents",
"toc_cell": true,
"toc_position": {},
"toc_section_display": true,
"toc_window_display": true
}
},
"nbformat": 4,
"nbformat_minor": 4
}