Jean-Michel Daignan jeanmidevacc

@jeanmidevacc
jeanmidevacc / offers_collecter.py
Last active November 4, 2019 00:27
Script to collect offers from a Turo search for a specific city
# Load the dependencies
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from bs4 import BeautifulSoup as bs
from time import sleep
# Define the main url (where to log the location)
url_main_page = "https://turo.com/en-us?locale=en_US"
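The preview stops after the setup; below is a minimal sketch of how the collection could continue, reusing the imports and url_main_page defined above, with Selenium loading the search results and BeautifulSoup parsing them. The search-box locator, the "/car-rental/" link filter and the sleep durations are assumptions to adapt to the live Turo page, not the gist's actual logic.

# Sketch only: Firefox + geckodriver assumed available, selectors are placeholders
city = "Montreal, QC"
driver = webdriver.Firefox()
driver.get(url_main_page)
sleep(5)

# Type the city in the search box (Selenium 3 style locator, placeholder selector)
searchbox = driver.find_element_by_css_selector("input[type='search']")
searchbox.send_keys(city)
searchbox.send_keys(Keys.ENTER)
sleep(5)

# Parse the results page and keep the links that look like offers
soup = bs(driver.page_source, "html.parser")
urls_offers = [a["href"] for a in soup.find_all("a", href=True) if "/car-rental/" in a["href"]]
driver.quit()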
@jeanmidevacc
jeanmidevacc / offer_scraper.py
Last active November 3, 2019 23:27
Script to collect the data of an offer on Turo (details on the car + pictures)
# Load the dependencies
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from bs4 import BeautifulSoup as bs
from time import sleep
import requests
# URL to scrape
url_toscrap = "https://turo.com/ca/en-us/car-rental/montreal-qc/ford/mustang/702436?searchId=OD83L624"
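A sketch of the follow-up parsing, reusing the imports and url_toscrap above: Selenium renders the offer page, BeautifulSoup reads the details, and requests downloads the pictures. The tags and attributes targeted here are assumptions, not Turo's actual markup.

# Sketch only: selectors are placeholders to adapt to the real offer page
driver = webdriver.Firefox()
driver.get(url_toscrap)
sleep(5)
soup = bs(driver.page_source, "html.parser")
driver.quit()

# Car name from the page title (the real gist may target a more specific tag)
car_name = soup.find("title").text if soup.find("title") else None

# Download the pictures referenced by the <img> tags
for i, img in enumerate(soup.find_all("img", src=True)):
    response = requests.get(img["src"])
    if response.status_code == 200:
        with open(f"picture_{i}.jpg", "wb") as f:
            f.write(response.content)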
import mlflow.sagemaker as mfs
# Define the MLflow parameters
experimentid = 1
runid = "xxxxxxx"
# AWS setup
awsid = "xxxxxx"  # AWS account ID used to deploy the system
region = "xxxxx"  # AWS region to deploy the API
arn = f"arn:aws:iam::{awsid}:role/xxxxx"  # ARN of the IAM role used for the deployment on SageMaker
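The preview only shows the configuration; a deployment call along these lines would follow. The exact signature of mlflow.sagemaker.deploy changed across MLflow versions (it is replaced by a deployment client in MLflow 2.x), so treat this as an illustrative MLflow 1.x sketch; app_name and the "model" artifact path are placeholders.

# Illustrative MLflow 1.x style call (argument names may differ in your version)
model_uri = f"runs:/{runid}/model"  # assumes the model was logged under the "model" artifact path
mfs.deploy(
    app_name="xxxxx",
    model_uri=model_uri,
    execution_role_arn=arn,
    region_name=region,
    mode="create",
)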
import boto3
import json
# Name of the app defined during the deployment on SageMaker
app_name = "xxxxx"
# AWS region where the app was deployed on SageMaker
region = "xxxxx"
# Function to query the model endpoint deployed on SageMaker
def query_endpoint(input_json):
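    # Assumed completion (the body is not shown in the preview): call the SageMaker runtime with boto3
    client = boto3.session.Session().client("sagemaker-runtime", region)
    response = client.invoke_endpoint(
        EndpointName=app_name,
        Body=input_json,
        ContentType="application/json",
    )
    # Decode the JSON payload returned by the endpoint
    predictions = response["Body"].read().decode("ascii")
    return json.loads(predictions)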
from sklearn.neighbors import KNeighborsRegressor
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score, explained_variance_score
import mlflow
import mlflow.sklearn
import numpy as np
# Launch the experiment on mlflow
experiment_name = "electricityconsumption-forecast"
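A minimal sketch of the kind of run that typically follows this setup: train a KNeighborsRegressor, score it, and log the parameters, metrics and model to MLflow. The data variables (x_train, x_test, y_train, y_test) and the hyperparameter value are assumptions, not the gist's actual ones.

# Sketch only: x_train/x_test/y_train/y_test are assumed to exist
mlflow.set_experiment(experiment_name)

with mlflow.start_run(run_name="knn_baseline"):
    n_neighbors = 5
    model = KNeighborsRegressor(n_neighbors=n_neighbors)
    model.fit(x_train, y_train)
    predictions = model.predict(x_test)

    # Log the configuration, the evaluation metrics and the fitted model
    mlflow.log_param("n_neighbors", n_neighbors)
    mlflow.log_metric("mse", mean_squared_error(y_test, predictions))
    mlflow.log_metric("mae", mean_absolute_error(y_test, predictions))
    mlflow.log_metric("r2", r2_score(y_test, predictions))
    mlflow.log_metric("explained_variance", explained_variance_score(y_test, predictions))
    mlflow.sklearn.log_model(model, "model")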
from PIL import Image
def build_mlimage(path, config_resize=(100, 50), is_bw=True):
    # Access the image
    img = Image.open(path)
    # Resize and convert to black and white (if necessary)
    if is_bw:
        newimg = img.resize(config_resize, Image.ANTIALIAS).convert('L')
    else:
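        # Assumed completion (not shown in the preview): resize only, keep the colours
        newimg = img.resize(config_resize, Image.ANTIALIAS)
    # Return the processed image (the original gist may return a flattened array instead)
    return newimg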
@jeanmidevacc
jeanmidevacc / decoratorexample_flow.py
Created January 22, 2020 23:27
A Flow designed to explain the potential of Metaflow decorators.
"""
pipeline.py
Script to test the different decorators of the Metaflow framework
"""
import random
from metaflow import FlowSpec, step, Parameter, conda, conda_base
@conda_base(disabled=False, python="3.7.4", libraries={"pandas": "0.25.2"})
class ExampleFlow(FlowSpec):
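The class body is cut off in the preview; a minimal flow along the same lines is sketched below, reusing the imports above, to show how the step-level decorators plug in. The step names, the @conda override on one step and the computation are illustrative assumptions, not the gist's actual steps.

# Sketch only: a small linear flow with a step-level @conda override
@conda_base(disabled=False, python="3.7.4", libraries={"pandas": "0.25.2"})
class ExampleFlowSketch(FlowSpec):
    # Parameter exposed on the command line (illustrative)
    size = Parameter("size", help="Number of random draws", default=10)

    @step
    def start(self):
        # Draw some random numbers to pass to the next step
        self.draws = [random.random() for _ in range(self.size)]
        self.next(self.compute)

    @conda(libraries={"pandas": "0.25.2"})
    @step
    def compute(self):
        # A step-level @conda can override the @conda_base environment
        import pandas as pd
        self.average = pd.Series(self.draws).mean()
        self.next(self.end)

    @step
    def end(self):
        print(f"Average of {self.size} draws: {self.average}")


if __name__ == "__main__":
    ExampleFlowSketch()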
informations = []
for i, run in enumerate(runs):
    if run.successful:
        # Collect some details on the first and last step of the flow
        step_start = Step(f"{flowname}/{run.id}/start")
        step_end = run.end_task
        # Collect the number of cards picked for the features computation
        nbr_cardsselected = step_start.task.data.limittopcards
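        # Assumed continuation (not shown in the preview): aggregate the run details
        informations.append({
            "run_id": run.id,
            "nbr_cardsselected": nbr_cardsselected,
        })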
"""
Based on https://www.tensorflow.org/tutorials/images/cnn
"""
import pathlib
import tensorflow as tf
# Definition of the constants
BATCH_SIZE = 30
EPOCHS = 5
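Between the constants and the model definition, the images have to be loaded; below is a sketch using the Keras ImageDataGenerator API, one common way at the time to feed images from folders, reusing pathlib, tf and BATCH_SIZE from above. The directory layout and the IMG_HEIGHT/IMG_WIDTH values are assumptions.

# Sketch of the data loading step (directory layout and image size are assumptions)
IMG_HEIGHT, IMG_WIDTH = 150, 150
data_dir = pathlib.Path("data/train")  # expects one sub-folder per class

train_generator = tf.keras.preprocessing.image.ImageDataGenerator(
    rescale=1.0 / 255,
    validation_split=0.2,
).flow_from_directory(
    str(data_dir),
    target_size=(IMG_HEIGHT, IMG_WIDTH),
    batch_size=BATCH_SIZE,
    class_mode="binary",
    subset="training",
)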
import tensorflow as tf
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(16, 3, padding='same', activation='relu', input_shape=(IMG_HEIGHT, IMG_WIDTH, 3)),
    tf.keras.layers.MaxPooling2D(),
    tf.keras.layers.Conv2D(32, 3, padding='same', activation='relu'),
    tf.keras.layers.MaxPooling2D(),
    tf.keras.layers.Conv2D(64, 3, padding='same', activation='relu'),
    tf.keras.layers.MaxPooling2D(),
    tf.keras.layers.Flatten(),
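    # Assumed completion (not shown in the preview): dense head on top of the convolutional base
    tf.keras.layers.Dense(512, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid'),  # single output, assuming a binary task
])

# Typical compile/fit calls for this kind of model (loss and optimizer are assumptions);
# train_generator comes from the loading sketch above
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(train_generator, epochs=EPOCHS)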