@mcohen01
Created May 13, 2019 02:17
gcloud config set compute/zone us-central1-c
gcloud compute instances create tpu-demo-vm --machine-type=n1-standard-4 --boot-disk-size=500GB --image-project=ml-images --image-family=tf-1-8 --scopes=cloud-platform
gcloud beta compute tpus create demo-tpu --range=10.240.1.0/29 --version=1.13 --network=default
gcloud compute ssh tpu-demo-vm
export TPU_NAME="demo-tpu"
touch ~/.kaggle/kaggle.json
echo '{"username":"michaelcohen","key":"bc26b9f431007579370639b577e0f623"}' > ~/.kaggle/kaggle.json
chmod 600 ~/.kaggle/kaggle.json
pip install kaggle
~/.local/bin/kaggle competitions download -c histopathologic-cancer-detection
mkdir train
mkdir test
mv train_labels.csv.zip train
mv train.zip train
mv test.zip test
sudo su -
apt-get install unzip
cd train
unzip -qq train_labels.csv.zip && rm train_labels.csv.zip
chmod a+r train_labels.csv
unzip -qq train.zip && rm train.zip
cd ../test
unzip -qq test.zip && rm test.zip
pip3 install pandas keras Pillow scikit-learn matplotlib tensorflow --upgrade
vim cancer.py
import json
import glob
import numpy as np
import pandas as pd
import tensorflow as tf
from PIL import Image
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras.preprocessing.image import ImageDataGenerator
from keras.applications import VGG19
from keras.optimizers import Adam
filelist = sorted(glob.glob('train/*.tif'))
train_images = np.array([np.array(Image.open(fname)) for fname in filelist])
labels = pd.read_csv('train/train_labels.csv').sort_values(by='id')
# sanity check
assert(list(labels.id.array) == list(map(lambda x: x.replace('train/','').replace('.tif',''), filelist)))
train_labels = np.asarray(labels.label.array)
X_train, X_test, y_train, y_test = train_test_split(train_images[1:1000],
                                                    train_labels[1:1000],
                                                    test_size=0.33)
def get_model():
    # VGG19 convolutional base pretrained on ImageNet, used as a frozen feature extractor
    vgg19_net = VGG19(weights='imagenet',
                      include_top=False,
                      input_shape=(96, 96, 3))
    vgg19_net.trainable = False
    #vgg19_net.summary()
    model = Sequential()
    model.add(vgg19_net)
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(1))
    model.add(Activation('sigmoid'))
    model.compile(loss='binary_crossentropy',
                  optimizer=Adam(lr=.00014),
                  metrics=['accuracy'])
    return model
model = get_model()
history = model.fit(X_train, y_train, epochs=20, batch_size=64)
test_loss, test_acc = model.evaluate(X_test, y_test)
print('Loss: {}'.format(test_loss))
print('Accuracy: {}'.format(test_acc))
# connect to the Cloud TPU created above (TensorFlow 1.13 contrib APIs)
resolver = tf.contrib.cluster_resolver.TPUClusterResolver(tpu='demo-tpu')
from tensorflow.python.tpu.tpu_strategy_util import initialize_tpu_system
initialize_tpu_system(resolver)
# or tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.contrib.distribute.TPUStrategy(resolver)
# alternatively, construct the strategy from TPUStrategyV1 directly
from tensorflow.python.distribute.tpu_strategy import TPUStrategyV1 as TPUStrategy
strategy = TPUStrategy(resolver)
# model building and training must run inside the strategy scope to use the TPU
with strategy.scope():
    model = get_model()
    history = model.fit(X_train, y_train, epochs=20, batch_size=64)
history_df = pd.DataFrame(history.history)
history_df[['loss']].plot()
_ = history_df[['acc']].plot()
gcloud beta compute tpus delete demo-tpu
gcloud compute instances delete tpu-demo-vm
Delete VPC network peering
Go to the VPC network section in the Google Cloud Platform Console.
Navigate to the VPC network peering section and select the peering that Google automatically created as part of the Cloud TPU setup. The peering ID starts with cp-to-tp-peering.
At the top of the page, click Delete.
Delete VPC route
Navigate to the Routes section and select the route that Google automatically created as part of the Cloud TPU setup. The route ID starts with peering-route.
At the top of the page, click Delete.
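The same cleanup can also be done from the shell. A rough gcloud equivalent is sketched below; the peering and route names here are placeholders, so list them first and substitute the actual IDs (they start with cp-to-tp-peering and peering-route).
gcloud compute networks peerings list --network=default
gcloud compute networks peerings delete cp-to-tp-peering-NAME --network=default  # placeholder name
gcloud compute routes list --filter="name~'peering-route'"
gcloud compute routes delete peering-route-NAME  # placeholder name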
nano cloud-tpu.py
import os
import tensorflow as tf
from tensorflow.contrib import tpu
from tensorflow.contrib.cluster_resolver import TPUClusterResolver
# simple sanity-check computation (a * x + y) to verify the TPU is reachable
def axy_computation(a, x, y):
    return a * x + y

inputs = [
    3.0,
    tf.ones([3, 3], tf.float32),
    tf.ones([3, 3], tf.float32),
]

tpu_computation = tpu.rewrite(axy_computation, inputs)

tpu_grpc_url = TPUClusterResolver(
    tpu=[os.environ['TPU_NAME']]).get_master()

with tf.Session(tpu_grpc_url) as sess:
    sess.run(tpu.initialize_system())
    sess.run(tf.global_variables_initializer())
    output = sess.run(tpu_computation)
    print(output)
    sess.run(tpu.shutdown_system())

print('Done!')
python cloud-tpu.py
exit