Skip to content

Instantly share code, notes, and snippets.

View araffin's full-sized avatar

Antonin RAFFIN araffin

View GitHub Profile
import os
import gym
import numpy as np
import matplotlib.pyplot as plt
from stable_baselines.common.vec_env.dummy_vec_env import DummyVecEnv
from stable_baselines.bench import Monitor
from stable_baselines.results_plotter import load_results, ts2xy
from stable_baselines import DDPG
@araffin
araffin / one_line_rl.py
Last active September 18, 2018 09:29
Train an RL agent in one line of code!
from stable_baselines import PPO2

# Build the agent first, then train it; `learn` returns the model itself,
# so `trained_model` ends up bound to the same (now trained) PPO2 instance.
untrained = PPO2('MlpPolicy', 'CartPole-v1')
trained_model = untrained.learn(total_timesteps=10000)
# The underlying gym env is accessible via trained_model.get_env()
from stable_baselines.common.policies import FeedForwardPolicy
from stable_baselines import A2C
# Custom MLP policy of three layers of size 128 each
class CustomPolicy(FeedForwardPolicy):
    """Custom MLP policy: three fully-connected layers of 128 units each.

    Forwards all positional/keyword arguments to ``FeedForwardPolicy``
    and only pins the network architecture.
    """

    def __init__(self, *args, **kwargs):
        # Modern Python-3 zero-argument super() instead of the legacy
        # super(CustomPolicy, self) form; behavior is identical.
        super().__init__(*args, **kwargs,
                         layers=[128, 128, 128],
                         feature_extraction="mlp")
import gym
import numpy as np
from stable_baselines.common.policies import MlpPolicy
from stable_baselines.common.vec_env import SubprocVecEnv
from stable_baselines.common import set_global_seeds
from stable_baselines import ACKTR
def make_env(env_id, rank, seed=0):
"""
@araffin
araffin / a2c_lunar.py
Last active October 16, 2022 13:53
Training, Saving and Loading an A2C agent
import gym
from stable_baselines import A2C
from stable_baselines.common.policies import MlpPolicy
from stable_baselines.common.vec_env import DummyVecEnv

# Create and wrap the environment
env = gym.make('LunarLander-v2')
# Bind the freshly created env as a default argument: a bare `lambda: env`
# closes over the *name* `env`, which this very assignment rebinds to the
# DummyVecEnv (classic late-binding closure pitfall). The original only
# worked because DummyVecEnv calls the factory inside its constructor,
# before the rebinding takes effect; any later call would return the
# vectorized wrapper instead of the raw env.
env = DummyVecEnv([lambda env=env: env])
@araffin
araffin / demo_baselines.py
Last active April 10, 2020 19:13
Getting Started With Stable Baselines
# from https://github.com/hill-a/stable-baselines
# Minimal getting-started example: build a PPO2 agent on CartPole.
import gym
from stable_baselines.common.policies import MlpPolicy
from stable_baselines import PPO2
# Create the CartPole gym environment (no explicit vectorized wrapper here;
# presumably PPO2 wraps a bare env itself -- TODO confirm against the docs).
env = gym.make('CartPole-v1')
# verbose=1 prints training progress to stdout during learn().
model = PPO2(MlpPolicy, env, verbose=1)
# Train the agent
@araffin
araffin / classic_serial.cpp
Created April 2, 2018 19:24
Example of common approach to use Arduino Serial
// If we have received data
if (Serial.available() > 0)
{
// Read the order sent by the computer
order_received = Serial.read();
// If the received byte is the character 'a'
if (order_received == 'a')
// This corresponds to the action GO_FORWARD
// It will be used later to send speed order to the motor
action = GO_FORWARD;
@araffin
araffin / example_serial.rs
Last active April 8, 2018 12:18
Example Use of Robust Serial in Rust
// Extracted from https://github.com/araffin/rust-arduino-serial
extern crate robust_arduino_serial;
use robust_arduino_serial::*;
// Open Serial Port
let mut port = serial::open(&serial_port).unwrap();
// Please see the original file to have a complete example
...
// Send the order "MOTOR", i.e. to change the speed of the car
// equivalent to write_i8(&mut port, Order::MOTOR as i8)
@araffin
araffin / example_serial.py
Last active March 3, 2023 16:12
Example Use of Robust Serial in Python
# From https://github.com/araffin/python-arduino-serial
# Example: send a MOTOR command to an Arduino over a serial link.
from robust_serial import Order, write_order, write_i8, write_i16
from robust_serial.utils import open_serial_port
# Open serial port with a baudrate of 9600 (bits/s)
# NOTE(review): no port name given -- open_serial_port presumably
# auto-detects the Arduino's port; verify against the library docs.
serial_file = open_serial_port(baudrate=9600)
# Send the order "MOTOR", i.e. to change the speed of the car
# equivalent to write_i8(serial_file, Order.MOTOR.value)
write_order(serial_file, Order.MOTOR)
@araffin
araffin / example_serial.cpp
Last active April 2, 2018 19:15
Example Use of Robust Serial in C++
// Extracted from https://github.com/araffin/cpp-arduino-serial/
#include <robust_serial.hpp>
// Open Serial Port
serial_file.open(serial_filename);
// Send the order "MOTOR", i.e. to change the speed of the car
write_order(serial_file, MOTOR);
// with parameter speed=56 (going forward at 56% of the maximum speed)
// The parameter "speed" is encoded as an 8-bit (1-byte) signed int