Skip to content

Instantly share code, notes, and snippets.

💭
learning reinforcement learning

aymericdelab

💭
learning reinforcement learning
Block or report user

Report or block aymericdelab

Hide content and notifications from this user.

Learn more about blocking users

Contact Support about this user’s behavior.

Learn more about reporting abuse

Report abuse
View GitHub Profile
@aymericdelab
aymericdelab / export_and_evaluate_azure_tfestimator_model.py
Last active Oct 17, 2019
Export your saved model from Azure to your local computer and evaluate its accuracy on the test set
View export_and_evaluate_azure_tfestimator_model.py
#imports
import json
import numpy as np
from tensorflow.contrib import predictor
## transfer the saved model from azure blob storage to your local computer
ds = ws.get_default_datastore()
ds.download(target_path='outputs',
prefix='founder-classifier/outputs/model',
@aymericdelab
aymericdelab / azure_launch_training.py
Created Oct 17, 2019
create your estimator and submit it to Azure for training
View azure_launch_training.py
from azureml.train.dnn import TensorFlow
datastore = ws.get_default_datastore()
script_params = {
'--data-folder': datastore.as_mount(),
'--batch-size': 32,
'--learning-rate': 0.001,
'--prefix': 'founder-classifier',
'--steps': 1000
View deploy_model_Sage.py
# deployment as an endpoint on a SageMaker instance
# NOTE(review): `estimator` is assumed to be an already-fitted SageMaker
# TensorFlow estimator created in a previous snippet — confirm against caller.
# `deploy` provisions a real-time inference endpoint on one ml.p2.xlarge
# (GPU) instance; provisioning takes several minutes and bills until deleted.
predictor = estimator.deploy(initial_instance_count=1,
instance_type='ml.p2.xlarge',
endpoint_name='founder-classifier-endpoint')
# image should be an array of dimension [-1,28,28,1] and type float64 as specified in our serving_input_fn
# Send a single prediction request; the payload key 'x' must match the
# feature name expected by the model's serving input function.
predictor.predict({'x': image})
# don't forget to delete your endpoint when you're finished using it
@aymericdelab
aymericdelab / launch_training.py
Created Oct 17, 2019
Launch training on SageMaker
View launch_training.py
from sagemaker import get_execution_role
from sagemaker.tensorflow import TensorFlow
role=get_execution_role()
estimator = TensorFlow(entry_point='aws_entry_point.py',
role=role,
training_steps=1000,
train_instance_count=1,
train_instance_type='ml.p2.xlarge',
View upload_data_to_S3_bucket.py
import sagemaker

# Upload the training data to the SageMaker default S3 bucket.
#
# Fix: the original constructed sagemaker.Session() twice — once to resolve
# the default bucket and again to perform the upload. Reuse a single session
# so both operations are guaranteed to share the same region/credentials.
session = sagemaker.Session()
bucket = session.default_bucket()   # account/region default SageMaker bucket
prefix = 'founder-classifier/data'  # S3 key prefix for this project's data

# upload_data returns the S3 URI of the uploaded object
# (s3://<bucket>/<prefix>/train.json).
train_response = session.upload_data(path='data/train.json',
                                     bucket=bucket,
                                     key_prefix=prefix)
@aymericdelab
aymericdelab / split_and_store_images_in_json.py
Created Oct 17, 2019
split train and test set and store into json
View split_and_store_images_in_json.py
from sklearn.model_selection import train_test_split
import cv2
import json
import numpy as np
def images_to_json(prefix):
founders=[
{'name': 'Bill Gates',
View face_images_preprocessing.py
import cv2
import os
dim1,dim2 = 28,28
founders=['Jeff Bezos','Larry Page','Bill Gates']
for founder in founders:
#prefix=r'.\data\google\{}'.format(founder)
View download_bing_images.py
from azure.cognitiveservices.search.imagesearch import ImageSearchAPI
from msrest.authentication import CognitiveServicesCredentials
from PIL import Image
import requests
from io import BytesIO
import os
# create an Azure account
# and get your API key from here:
# https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/
View download_google_images.py
# Set up google_images_download for bulk image scraping.
from google_images_download import google_images_download

downloader = google_images_download.googleimagesdownload()

# Retrieving more than 100 images per query from Google Images requires
# Selenium driving a browser; point this at a local chromedriver binary.
# Fix: use a raw string — the original plain string contained invalid
# escape sequences ('\P', '\c'), which only work by CPython's deprecated
# leniency (SyntaxWarning since 3.12, slated to become an error).
cd_path = r'C:\Program Files\chromedriver_win32\chromedriver.exe'

output_dir = r'./data/google/'  # downloaded images land under this folder
founders = ['Jeff Bezos', 'Larry Page', 'Bill Gates']  # one query per person
@aymericdelab
aymericdelab / from_image_to_json.py
Created Oct 16, 2019
Transform an image into a JSON file to send to a GCP-deployed model
View from_image_to_json.py
import cv2
import json
import numpy as np
def from_image_to_json(image_directory):
image=cv2.imread(image_directory)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faceCascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
faces = faceCascade.detectMultiScale(
You can’t perform that action at this time.