Hristian Carabulea (hristian-carabulea), GitHub gists
import time
from collections import deque

import numpy as np

from RoadEnv import RoadEnv
from DQNAgent import DQNAgent

# Initialize environment
env = RoadEnv()

# size of input image
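# The gist stops here. Below is a hedged sketch of the training loop such a
# setup usually drives; it assumes RoadEnv follows the Gym reset/step API and
# that DQNAgent exposes act/remember/replay as in common DQN tutorials. None
# of these names, signatures, or hyperparameters come from the gist itself.
EPISODES = 500     # assumed value
BATCH_SIZE = 32    # assumed value

agent = DQNAgent()  # constructor arguments unknown; assumed to default

for episode in range(EPISODES):
    state = env.reset()
    done = False
    total_reward = 0.0
    while not done:
        action = agent.act(state)                       # epsilon-greedy choice
        next_state, reward, done, _ = env.step(action)  # Gym-style transition
        agent.remember(state, action, reward, next_state, done)
        state = next_state
        total_reward += reward
    agent.replay(BATCH_SIZE)  # fit the network on a sampled replay minibatch
    print('episode {}: total reward {:.1f}'.format(episode, total_reward))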
import random
from collections import deque
from random import randint

import numpy as np
import gym
from gym import spaces, utils
from tensorflow.keras.models import Sequential, load_model, model_from_yaml
from tensorflow.keras.layers import Dense, Conv2D, Activation, Flatten
from tensorflow.keras.optimizers import Adam


class Obstacle:
    def __init__(self):
        pass  # body not included in the gist
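# These imports are the standard ingredients of a tutorial-style Keras DQN
# agent. Below is a hedged sketch of what such an agent typically looks like;
# the class name, architecture, and hyperparameters are illustrative
# assumptions, not values taken from the gist. It reuses the imports above.
class SketchDQNAgent:
    def __init__(self, state_shape, action_size):
        self.state_shape = state_shape    # e.g. (84, 84, 1) grayscale frames
        self.action_size = action_size
        self.memory = deque(maxlen=2000)  # replay buffer
        self.gamma = 0.95                 # discount factor
        self.epsilon = 1.0                # exploration rate
        self.epsilon_min = 0.01
        self.epsilon_decay = 0.995
        self.model = self._build_model()

    def _build_model(self):
        # Small convolutional Q-network: image in, one Q-value per action out.
        model = Sequential()
        model.add(Conv2D(32, (8, 8), strides=4, input_shape=self.state_shape))
        model.add(Activation('relu'))
        model.add(Flatten())
        model.add(Dense(256, activation='relu'))
        model.add(Dense(self.action_size, activation='linear'))
        model.compile(loss='mse', optimizer=Adam(learning_rate=0.001))
        return model

    def remember(self, state, action, reward, next_state, done):
        self.memory.append((state, action, reward, next_state, done))

    def act(self, state):
        # Epsilon-greedy: explore with probability epsilon, else exploit.
        if np.random.rand() <= self.epsilon:
            return random.randrange(self.action_size)
        q_values = self.model.predict(state[np.newaxis])
        return int(np.argmax(q_values[0]))

    def replay(self, batch_size):
        # Fit the Q-network on a random minibatch from the replay buffer.
        minibatch = random.sample(self.memory, min(batch_size, len(self.memory)))
        for state, action, reward, next_state, done in minibatch:
            target = reward
            if not done:
                target += self.gamma * np.amax(
                    self.model.predict(next_state[np.newaxis])[0])
            target_f = self.model.predict(state[np.newaxis])
            target_f[0][action] = target
            self.model.fit(state[np.newaxis], target_f, epochs=1, verbose=0)
        if self.epsilon > self.epsilon_min:
            self.epsilon *= self.epsilon_decay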
#
# .... body of model_fn
#
optimizer = tf.train.AdamOptimizer()
if FLAGS.use_tpu:
    # CrossShardOptimizer aggregates gradients across all TPU shards
    # before they are applied, so each core sees the same update.
    optimizer = tf.contrib.tpu.CrossShardOptimizer(optimizer)
train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
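# A hedged sketch of a complete model_fn built around the fragment above,
# for the TF 1.x tf.contrib.tpu.TPUEstimator API. The model body (a single
# dense layer) and the feature/label handling are placeholder assumptions.
def model_fn(features, labels, mode, params):
    logits = tf.layers.dense(features, 10)  # placeholder network body
    loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
    optimizer = tf.train.AdamOptimizer()
    if FLAGS.use_tpu:
        optimizer = tf.contrib.tpu.CrossShardOptimizer(optimizer)
    train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
    # TPUEstimatorSpec is the TPU analogue of tf.estimator.EstimatorSpec.
    return tf.contrib.tpu.TPUEstimatorSpec(mode=mode, loss=loss, train_op=train_op)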
import os
import tensorflow as tf

# This address identifies the TPU we'll use when configuring TensorFlow.
TPU_WORKER = 'grpc://' + os.environ['COLAB_TPU_ADDR']

tf.logging.set_verbosity(tf.logging.INFO)

# Convert the Keras model to run on the TPU (TF 1.x contrib API).
resnet_model = tf.contrib.tpu.keras_to_tpu_model(
    resnet_model,
    strategy=tf.contrib.tpu.TPUDistributionStrategy(
        tf.contrib.cluster_resolver.TPUClusterResolver(TPU_WORKER)))
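# Hedged usage sketch: the converted model is then compiled and trained like
# any Keras model. The loss, metrics, and the x_train/y_train arrays below
# are assumptions, not part of the gist.
resnet_model.compile(
    optimizer=tf.train.AdamOptimizer(),
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'])
resnet_model.fit(x_train, y_train, epochs=5,
                 batch_size=128 * 8)  # global batch split across the 8 TPU cores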
import os
import pprint
import tensorflow as tf

if 'COLAB_TPU_ADDR' not in os.environ:
    print('ERROR: Not connected to a TPU runtime; please see the first cell in this notebook for instructions!')
else:
    tpu_address = 'grpc://' + os.environ['COLAB_TPU_ADDR']
    print('TPU address is', tpu_address)
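    # The otherwise-unused pprint import above suggests the original cell went
    # on to list the TPU's devices; a hedged sketch of that step using the
    # TF 1.x Session API:
    with tf.Session(tpu_address) as session:
        devices = session.list_devices()
    print('TPU devices:')
    pprint.pprint(devices)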
#!/usr/bin/env python3
# coding=utf-8
import collections
import datetime
import itertools
import sys
import time

import matplotlib as mpl
import matplotlib.pyplot as plt