Files for "A Guide to H.264 Streaming with Regards to FRC"
"""
This is the code we use to view the camera output.
It also had code to construct a birds-eye view from the four camera streams
our robot runs. Don't worry about that part though. We don't use it.
"""
import cv2
import numpy as np
# Define a few utility functions
array = np.array
unzip = lambda x: zip(*x) # The inverse of the zip function
# Here are the numbers of the streams we wish to use
cams = [1, 2, 3, 4]
# Some constants for the bird's eye view stuff
SCALE = 4 # How much to scale down the outputted bird's eye video
matrs = [
    array([[-2.57738482e+00, -6.02161725e+00, 1.95984254e+03],
           [-2.92991022e-01, -5.84669931e+00, 1.69815169e+03],
           [-6.20140939e-04, -4.11602611e-03, 1.00000000e+00]]),
    array([[-1.59379313e-01, -6.15997791e+00, 1.29704075e+03],
           [-8.54180920e-01, -5.64936893e+00, 1.59149091e+03],
           [-2.29296551e-05, -4.20450955e-03, 1.00000000e+00]]),
    array([[ 2.38423630e+00, -7.16491735e+00, 8.81147315e+02],
           [ 6.74209899e-01, -5.87250433e+00, 6.82229552e+02],
           [ 3.49910802e-04, -4.86193013e-03, 1.00000000e+00]]),
    array([[-1.23478369e-01, -4.65842846e+00, 1.45208474e+03],
           [ 7.38559847e-01, -5.13411641e+00, 1.09008691e+03],
           [-2.89831683e-05, -3.81333346e-03, 1.00000000e+00]]),
]
masklines = [((184, 0), (4216, 5661)), ((2584, 0), (-1448, 4496)), ((3024, 2682), (-1008, -1026)), ((0, 2050), (4032, -432))]
seeds = [(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)]
shape = (4032//SCALE, 3024//SCALE, 3)
doubmat = np.array([[2, 0, 0], [0, 2, 0], [0, 0, 1]])
halfmat = np.array([[1/SCALE, 0, 0], [0, 1/SCALE, 0], [0, 0, 1]])
matrs = [np.matmul(mat, doubmat) for mat in matrs]
matrs = [np.matmul(halfmat, mat) for mat in matrs]
for i, line in enumerate(masklines):
    masklines[i] = tuple(tuple(coord // SCALE for coord in endpoint) for endpoint in line)
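# For reference, matrices like those in `matrs` can be produced from four
# point correspondences between a camera frame and the bird's-eye canvas.
# This is only a sketch of the idea; the pixel coordinates below are made up
# for illustration, not the ones we calibrated with.
# src = np.float32([(102, 205), (517, 198), (590, 430), (60, 441)])  # frame px
# dst = np.float32([(0, 0), (2000, 0), (2000, 1500), (0, 1500)])     # canvas px
# matr = cv2.getPerspectiveTransform(src, dst)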
def gencap(i):
    """Return an opened stream for the ith camera."""
    print('Opening stream {}'.format(i))
    # If all your cameras are connected locally to your computer, use this!
    # stream = cv2.VideoCapture('v4l2src device=/dev/video{} ! video/x-raw,width=320,height=240 ! videoconvert ! appsink sync=false drop=true'.format(i), cv2.CAP_GSTREAMER)
    # If you're running your RTSP server locally, use this!
    # stream = cv2.VideoCapture('rtspsrc location=rtsp://127.0.0.1:5800/stream{} latency=0 transport=tcp ! rtph264depay ! decodebin ! videoconvert ! appsink sync=false drop=true'.format(i), cv2.CAP_GSTREAMER)
    # Stream from the Kangaroo and flip certain videos
    if i == 1 or i == 3:
        stream = cv2.VideoCapture('rtspsrc location=rtsp://10.10.72.12:5800/stream{} latency=0 transport=tcp ! rtph264depay ! decodebin ! tee name=t ! queue ! videoconvert ! appsink sync=false drop=true t. ! queue ! videoconvert ! autovideosink'.format(i), cv2.CAP_GSTREAMER)
    else:
        stream = cv2.VideoCapture('rtspsrc location=rtsp://10.10.72.12:5800/stream{} latency=0 transport=tcp ! rtph264depay ! decodebin ! tee name=t ! queue ! videoconvert ! appsink sync=false drop=true t. ! queue ! videoflip method=rotate-180 ! videoconvert ! autovideosink'.format(i), cv2.CAP_GSTREAMER)
    print('Opened!')
    return stream
def genmasks():
    """Return an array of masks, each used to constrain a warped bird's-eye image to the area it covers."""
    masks = []
    for e1, e2, seed in zip(masklines, masklines[1:] + [masklines[0]], seeds):
        mask = np.zeros(shape[:2], np.uint8)
        cv2.line(mask, e1[0], e1[1], 255, 1)
        cv2.line(mask, e2[0], e2[1], 255, 1)
        sseed = int(seed[0]*(shape[1]-1)), int(seed[1]*(shape[0]-1))
        # floodFill requires a mask two pixels wider and taller than the image
        fmask = np.zeros((mask.shape[0]+2, mask.shape[1]+2), np.uint8)
        cv2.floodFill(mask, fmask, sseed, 255)
        # The generated mask conveniently includes the white area minus the line
        # So we'll just use that to avoid having to erase the line
        mask = fmask[1:-1, 1:-1]
        masks.append(mask)
    return masks
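# For intuition, here is the same floodFill bookkeeping in miniature (a toy
# sketch, not part of the pipeline): a diagonal line splits a 4x4 image, and
# filling from one corner marks only that side's pixels in the oversized mask,
# offset by one pixel in each direction.
# toy = np.zeros((4, 4), np.uint8)
# cv2.line(toy, (0, 0), (3, 3), 255, 1)
# toymask = np.zeros((6, 6), np.uint8)
# cv2.floodFill(toy, toymask, (3, 0), 255)
# region = toymask[1:-1, 1:-1]  # nonzero only on the filled side of the line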
def composite(images, masks):
    """Mask each image in the given array then combine them into one image."""
    result = np.zeros(shape, np.uint8)
    for img, mask in zip(images, masks):
        masked = cv2.bitwise_and(img, img, mask=mask)
        result = cv2.add(result, masked)
    dim = 2600 // SCALE
    cropped = result[:dim, :dim]
    return cropped
masks = genmasks()
caps = [gencap(i) for i in cams]
for _ in range(30): # Let the camera figure out auto settings
    statuses, images = unzip(c.read() for c in caps)
while True:
    statuses, images = list(unzip(c.read() for c in caps))
    images = list(images)
    for i, (c, status) in enumerate(zip(cams, statuses)):
        if not status:
            if i != 3:
                caps[i] = gencap(c)
            images[i] = np.zeros((240, 360, 3), dtype=np.uint8)
    # If you only want to display a bird's eye view if all cameras are up, uncomment this.
    # if not all(statuses):
    #     print('Skipping frame', statuses)
    #     continue
    # Preview the images with OpenCV.
    # In practice GStreamer's display code has lower latency, so we split our pipelines into
    # a to-OpenCV path and a to-display path and let GStreamer handle showing the videos.
    # for i, img in enumerate(images):
    #     cv2.imshow('image {}'.format(i), img)
    homs = list(cv2.warpPerspective(img, mat, (shape[1], shape[0])) for img, mat in zip(images, matrs))
    frame = composite(homs, masks)
    cv2.imshow('birdseye', frame[::-1, ::-1])
    if cv2.waitKey(10) == ord('q'):
        break
#!/bin/fish
# This is a little script I use for converting the videos saved by gstreamer and
# grabbed by the limelight into a format that plays better on most people's computers.
# It assumes you've placed all your videos into a folder called `video` and have
# created a folder called `nice` in the same directory to place them in.
cd video
function process
    ffmpeg -i $argv[1] -c:v libx264 -preset slow -crf 22 ../nice/$argv[2].mp4
end
for file in *.mp4
    process $file (basename $file .mp4)
end
for file in *.mjpeg
    process $file (basename $file .mjpeg)
end
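# Usage sketch (the file name `convert.fish` is hypothetical):
#   cd ~/footage         # the directory containing `video` and `nice`
#   fish convert.fish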
#!/bin/bash
# This script starts a GStreamer process to grab the MJPEG stream from your limelight and save it to a `video` folder.
file=/home/team1072/video/$(date "+%Y-%m-%d;%H-%M-%S-limelight.mjpeg")
gst-launch-1.0 souphttpsrc location=http://10.10.72.11:5802 ! filesink location=$file append=true

Copyright 2019 rianadon

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

#!/bin/sh
set -e # Exit if any of the commands below error
# Reload the uvcvideo driver with a specific quirks option.
# quirks=128 enables a fix for bandwidth issues with the type of cameras we use.
# Basically, it allows us to use multiple cameras on the same USB port.
rmmod uvcvideo
modprobe uvcvideo quirks=128
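# A persistent alternative (an untested sketch, not what we run): set the quirk
# in a modprobe config file so it applies at every boot without reloading here:
#   echo "options uvcvideo quirks=128" > /etc/modprobe.d/uvcvideo.conf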
env GST_DEBUG=2 /home/team1072/gst-rtsp-server-1.12.3/examples/test-launch -p 5800
# /etc/systemd/system/streaming.service
[Unit]
Description=Team 1072 Camera Streaming
[Service]
ExecStart=/home/team1072/startstreaming.sh
KillMode=process
Restart=on-failure
RestartPreventExitStatus=255
Type=simple
WorkingDirectory=/home/team1072/video
[Install]
WantedBy=multi-user.target
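# To install this unit (standard systemctl usage, nothing gist-specific):
#   sudo cp streaming.service /etc/systemd/system/
#   sudo systemctl daemon-reload
#   sudo systemctl enable --now streaming.service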
/* This code streams four cameras to our driver station.
 * It also saves the stream to videos stored in `/home/team1072/video`.
 * The names of these files are based on the current date.
 *
 * Past copyright notice:
 *
 * GStreamer
 * Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#include <gst/gst.h>
#include <stdio.h>
#include <time.h>
#include <gst/rtsp-server/rtsp-server.h>
#define DEFAULT_RTSP_PORT "8554"
static char *port = (char *) DEFAULT_RTSP_PORT;
static GOptionEntry entries[] = {
  {"port", 'p', 0, G_OPTION_ARG_STRING, &port,
      "Port to listen on (default: " DEFAULT_RTSP_PORT ")", "PORT"},
  {NULL}
};
int
main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory1;
  GstRTSPMediaFactory *factory2;
  GstRTSPMediaFactory *factory3;
  GstRTSPMediaFactory *factory4;
  GOptionContext *optctx;
  GError *error = NULL;
  time_t rawtime;
  struct tm *info;
  char buffer1[1024];
  char buffer2[1024];
  char buffer3[1024];
  char buffer4[1024];
  optctx = g_option_context_new ("<launch line> - Test RTSP Server, Launch\n\n"
      "Example: \"( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )\"");
  g_option_context_add_main_entries (optctx, entries, NULL);
  g_option_context_add_group (optctx, gst_init_get_option_group ());
  if (!g_option_context_parse (optctx, &argc, &argv, &error)) {
    g_printerr ("Error parsing options: %s\n", error->message);
    g_option_context_free (optctx);
    g_clear_error (&error);
    return -1;
  }
  g_option_context_free (optctx);
  loop = g_main_loop_new (NULL, FALSE);
  /* create a server instance */
  server = gst_rtsp_server_new ();
  g_object_set (server, "service", port, NULL);
  /* get the mount points for this server, every server has a default object
   * that can be used to map uri mount points to media factories */
  mounts = gst_rtsp_server_get_mount_points (server);
  time (&rawtime);
  info = localtime (&rawtime);
  /* make a media factory for a test stream. The default media factory can use
   * gst-launch syntax to create pipelines.
   * any launch line works as long as it contains elements named pay%d. Each
   * element with pay%d names will be a stream */
  factory1 = gst_rtsp_media_factory_new ();
  strftime (buffer1, sizeof buffer1, "v4l2src device=/dev/video0 ! video/x-raw,width=320,height=240,framerate=15/1 ! videoconvert ! video/x-raw,format=I420 ! x264enc tune=zerolatency bitrate=250 threads=1 ! tee name=t ! queue ! rtph264pay config-interval=1 name=pay0 pt=96 t. ! queue ! mpegtsmux ! filesink append=true location=/home/team1072/video/%Y-%m-%d;%H-%M-%S-1.mp4", info);
  gst_rtsp_media_factory_set_launch (factory1, buffer1);
  gst_rtsp_media_factory_set_shared (factory1, TRUE);
  gst_rtsp_mount_points_add_factory (mounts, "/stream1", factory1);
  factory2 = gst_rtsp_media_factory_new ();
  /* gst_rtsp_media_factory_set_launch (factory2, "videotestsrc ! video/x-raw,width=320,height=240,framerate=15/1,format=I420 ! videoconvert ! x264enc tune=zerolatency bitrate=250 threads=1 ! rtph264pay config-interval=1 name=pay0 pt=96"); */
  strftime (buffer2, sizeof buffer2, "v4l2src device=/dev/video1 ! video/x-raw,width=320,height=240,framerate=15/1 ! videoconvert ! video/x-raw,format=I420 ! x264enc tune=zerolatency bitrate=250 threads=1 ! tee name=t ! queue ! rtph264pay config-interval=1 name=pay0 pt=96 t. ! queue ! mpegtsmux ! filesink append=true location=/home/team1072/video/%Y-%m-%d;%H-%M-%S-2.mp4", info);
  gst_rtsp_media_factory_set_launch (factory2, buffer2);
  gst_rtsp_media_factory_set_shared (factory2, TRUE);
  gst_rtsp_mount_points_add_factory (mounts, "/stream2", factory2);
  factory3 = gst_rtsp_media_factory_new ();
  /* gst_rtsp_media_factory_set_launch (factory3, "videotestsrc ! video/x-raw,width=320,height=240,framerate=15/1,format=I420 ! videoconvert ! x264enc tune=zerolatency bitrate=250 threads=1 ! rtph264pay config-interval=1 name=pay0 pt=96"); */
  strftime (buffer3, sizeof buffer3, "v4l2src device=/dev/video2 ! video/x-raw,width=320,height=240,framerate=15/1 ! videoconvert ! video/x-raw,format=I420 ! x264enc tune=zerolatency bitrate=250 threads=1 ! tee name=t ! queue ! rtph264pay config-interval=1 name=pay0 pt=96 t. ! queue ! mpegtsmux ! filesink append=true location=/home/team1072/video/%Y-%m-%d;%H-%M-%S-3.mp4", info);
  gst_rtsp_media_factory_set_launch (factory3, buffer3);
  gst_rtsp_media_factory_set_shared (factory3, TRUE);
  gst_rtsp_mount_points_add_factory (mounts, "/stream3", factory3);
  factory4 = gst_rtsp_media_factory_new ();
  /* gst_rtsp_media_factory_set_launch (factory4, "videotestsrc ! video/x-raw,width=320,height=240,framerate=15/1,format=I420 ! videoconvert ! x264enc tune=zerolatency bitrate=250 threads=1 ! rtph264pay config-interval=1 name=pay0 pt=96 ! filesink"); */
  strftime (buffer4, sizeof buffer4, "v4l2src device=/dev/video3 ! video/x-raw,width=320,height=240,framerate=15/1 ! videoconvert ! video/x-raw,format=I420 ! x264enc tune=zerolatency bitrate=250 threads=1 ! tee name=t ! queue ! rtph264pay config-interval=1 name=pay0 pt=96 t. ! queue ! mpegtsmux ! filesink append=true location=/home/team1072/video/%Y-%m-%d;%H-%M-%S-4.mp4", info);
  gst_rtsp_media_factory_set_launch (factory4, buffer4);
  gst_rtsp_media_factory_set_shared (factory4, TRUE);
  gst_rtsp_mount_points_add_factory (mounts, "/stream4", factory4);
  /* don't need the ref to the mapper anymore */
  g_object_unref (mounts);
  /* attach the server to the default maincontext */
  gst_rtsp_server_attach (server, NULL);
  /* start serving */
g_print ("stream ready at rtsp://127.0.0.1:%s/stream1 and more\n", port);
g_print ("Launch with gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test2 latency=0 ! rtph264depay ! decodebin ! autovideosink");
  g_main_loop_run (loop);
  return 0;
}
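/* A plausible build command (a sketch; it assumes the gst-rtsp-server
 * development headers are installed and the file is saved as test-launch.c,
 * which is our guess from the paths above, not stated in this file):
 *   gcc test-launch.c -o test-launch \
 *       $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-rtsp-server-1.0)
 */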