The SD card needs to be larger than 4 GB.
Download the ISO here.
Unzip, then open balenaEtcher.
- Insert the SD card before turning on the brick
- Plug in cable, and power on
- Go to All Networks > Ensure 'Connected'
- Should be visible in VSCode, or SSH-able.
- Find device in EV3Device Browser
- Press the upload button to upload your currently opened folder
- Two options:
- Right click device, open ssh, and then use
`ls` and `cd` to locate your file. Then run `python3 <filename.py>`.
- Add shebang
#!/usr/bin/env python3
- CRLF -> LF
- Select file from brick screen
- Host:
ev3dev
- Port:
22
- Username:
robot
- Password:
maker
from ev3dev2.motor import LargeMotor, MediumMotor, OUTPUT_A, OUTPUT_B, OUTPUT_C

# Open connections to the three motors on their output ports.
large_a = LargeMotor(OUTPUT_A)
large_b = LargeMotor(OUTPUT_B)
medium_c = MediumMotor(OUTPUT_C)

# Run forwards at 70% speed for 4 seconds.
# This call blocks until the motor finishes.
large_a.on_for_seconds(70, 4)
print("LM1 Finished!")

# Run backwards at 30% speed for 2 seconds.
# block=False returns immediately, so the next line
# executes simultaneously with this motor running.
large_b.on_for_seconds(-30, 2, block=False)

# Turn the medium motor on at 40% speed (runs until told otherwise).
medium_c.on(40)

# If using ev3sim, you should wait_for_tick here.
large_b.wait_while("running")
medium_c.off()
print("LM2 Finished!")
from ev3dev2.motor import MoveTank, MoveSteering

# MoveTank drives two motors with independent speeds per side.
tank_drive = MoveTank(OUTPUT_A, OUTPUT_B)
# Left side at 70%, right side at 30%, for 2 seconds.
tank_drive.on_for_seconds(70, 30, 2)
print("Tank Finished!")

# MoveSteering drives the same pair with a steering constant instead.
steer_drive = MoveSteering(OUTPUT_A, OUTPUT_B)
# Steering constant 30 at speed -40, i.e. backwards to the left, for 2 seconds.
steer_drive.on_for_seconds(30, -40, 2)
print("Steering Finished!")
Specific Sensor Class Documentation
import time
from ev3dev2.sensor.lego import ColorSensor, UltrasonicSensor
from ev3dev2.sensor import INPUT_1, INPUT_2

# Open connections to the sensors on their input ports.
cs = ColorSensor(INPUT_1)
us = UltrasonicSensor(INPUT_2)

# The color sensor works best after white calibration;
# call this while the sensor is positioned over a white strip.
cs.calibrate_white()

while True:
    # Color sensor readings: raw RGB channels, best-guess color
    # name, and reflected light intensity.
    red, green, blue = cs.rgb
    predicted = cs.color_name
    rli = cs.reflected_light_intensity
    print("Color Data: R:{}, G:{}. B:{}. Predicted: {}. RLI: {}".format(red, green, blue, predicted, rli))
    # Ultrasonic sensor reading: distance to the nearest obstacle.
    dist_cm = us.distance_centimeters
    print("Ultrasonic Data: {}cm away".format(dist_cm))
    time.sleep(0.5)
import time
from ev3dev2.sensor import Sensor, INPUT_1, INPUT_2

# Generic Sensor objects talk to third-party devices through their
# ev3dev driver names when no dedicated class exists.
ir = Sensor(INPUT_1, driver_name="ht-nxt-ir-seek-v2")
ir.mode = "AC-ALL"

compass = Sensor(INPUT_2, driver_name="ht-nxt-compass")
# Calibrate the compass so the current heading reads 0.
# FIX: ev3dev2 sends driver commands via the lowercase `command`
# property. Assigning to the uppercase `COMMAND` only created a plain
# Python attribute on the object and never reached the device.
compass.command = "BEGIN-CAL"
compass.command = "END-CAL"

while True:
    # value(0) is the combined direction estimate; values 1-5 are the
    # readings of the five individual IR detector elements.
    direction = ir.value(0)
    subsensor_values = [ir.value(x) for x in range(1, 6)]
    print("Infrared Data: D:{}, V:{}".format(direction, subsensor_values))
    # value(0) on the compass is the bearing in degrees.
    bearing = compass.value(0)
    print("Compass Data: B:{}".format(bearing))
    time.sleep(0.5)
Console, Display can be good debugging tools, but in no way vital.
Sound is pretty self explanatory. Make sure related files are also uploaded to the brick.
LEDs are super simple debug tools. Some documented functionality does not work well; it is best to stick to the basic methods.
Usage is a bit more complicated.
import time
from ev3dev2.button import Button

btn = Button()

def handle_right(pressed):
    """Callback invoked by btn.process() when the right button changes state."""
    msg = "Right button pressed!" if pressed else "Right button released!"
    print(msg)

# Register the handler. Nothing fires until btn.process() is called.
btn.on_right = handle_right

# Instead of subscribing, you can also block until a button event:
print("Waiting for enter...")
btn.wait_for_released(["enter"])
print("Enter got pressed!")

while True:
    # Poll the buttons and dispatch any subscribed event handlers.
    btn.process()
    if btn.check_buttons(["left", "right"]):
        # True only when exactly this set of buttons is held.
        print("Left and right pressed! Exiting...")
        break
    elif btn.left:
        # Simple per-button check of the current pressed state.
        print("Left button is being held down! (And not right button)")
        print(btn.buttons_pressed, "<- This is the full list of buttons")
    time.sleep(0.01)