Last active
August 10, 2018 10:37
-
-
Save yoelk/5ff12e10f0ab2f0e5665e74f86e5f4a5 to your computer and use it in GitHub Desktop.
Test code for showing that it's not possible to keep 30 FPS over time
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
tcambin serial="20810358" name=source ! video/x-raw,format=GRAY8,width=1920,height=1080,framerate=30/1 ! videoconvert ! appsink name=sink | |
Press Esc to stop | |
No image reveived | |
--- FPS = 14.021855258540066 | |
--- FPS = 14.048198373782677 | |
--- FPS = 14.053658882220258 | |
--- FPS = 14.06174532870176 | |
--- FPS = 14.06463707090887 | |
--- FPS = 14.064004083831776 | |
--- FPS = 14.063271080079687 | |
--- FPS = 14.063111375092966 | |
--- FPS = 14.06380848443846 | |
--- FPS = 14.06594175484966 | |
--- FPS = 14.066781612567786 | |
--- FPS = 14.065088965411785 | |
--- FPS = 14.065515996072874 | |
No image reveived | |
No image reveived | |
No image reveived | |
late frame: 272.7081775665283 ms | |
No image reveived | |
No image reveived | |
late frame: 205.8720588684082 ms | |
No image reveived | |
No image reveived | |
late frame: 205.57355880737305 ms | |
No image reveived | |
No image reveived | |
No image reveived | |
late frame: 272.97377586364746 ms | |
No image reveived | |
No image reveived | |
late frame: 204.75530624389648 ms | |
No image reveived | |
No image reveived | |
No image reveived | |
late frame: 273.043155670166 ms | |
No image reveived | |
No image reveived | |
late frame: 206.34746551513672 ms | |
No image reveived | |
No image reveived | |
late frame: 205.75237274169922 ms | |
No image reveived | |
No image reveived | |
No image reveived | |
late frame: 273.3330726623535 ms | |
No image reveived | |
No image reveived | |
late frame: 205.6136131286621 ms | |
No image reveived | |
No image reveived | |
No image reveived | |
late frame: 273.3936309814453 ms | |
No image reveived | |
No image reveived | |
late frame: 205.76763153076172 ms | |
No image reveived | |
No image reveived | |
late frame: 205.2288055419922 ms | |
No image reveived | |
No image reveived | |
No image reveived | |
late frame: 273.12517166137695 ms | |
No image reveived | |
No image reveived | |
late frame: 205.83701133728027 ms | |
No image reveived | |
No image reveived |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import cv2 | |
import numpy | |
import gi | |
from collections import namedtuple | |
from time import time | |
gi.require_version("Gst", "1.0") | |
gi.require_version("Tcam", "0.1") | |
from gi.repository import Tcam, Gst, GLib, GObject | |
# Record for the tuple returned by source.get_tcam_property():
# property status plus its value, range, default, and metadata.
CameraProperty = namedtuple(
    "CameraProperty",
    ["status", "value", "min", "max", "default", "step",
     "type", "flags", "category", "group"],
)
class TIS:
    """The Imaging Source camera wrapped in a GStreamer capture pipeline."""

    def __init__(self, serial, width, height, numerator, denumerator, color):
        """Build and prepare the pipeline (READY state, not yet playing).

        serial -- camera serial number string
        width, height -- requested frame size in pixels
        numerator, denumerator -- framerate fraction (fps = numerator/denumerator)
        color -- True for BGRx color frames, False for GRAY8 mono
        """
        Gst.init([])
        self.height = height
        self.width = width
        self.sample = None
        self.samplelocked = False
        self.newsample = False
        # Renamed from `format` so the builtin is not shadowed.
        pixel_format = "BGRx" if color else "GRAY8"
        p = 'tcambin serial="%s" name=source ! video/x-raw,format=%s,width=%d,height=%d,framerate=%d/%d' % (
            serial, pixel_format, width, height, numerator, denumerator,)
        p += ' ! videoconvert ! appsink name=sink'
        print(p)
        try:
            self.pipeline = Gst.parse_launch(p)
        except GLib.Error as error:
            # BUG FIX: the original printed the undefined name `err`, which
            # would raise NameError and mask the real pipeline error.
            print("Error creating pipeline: {0}".format(error))
            raise
        self.pipeline.set_state(Gst.State.READY)
        self.pipeline.get_state(Gst.CLOCK_TIME_NONE)
        # Query a pointer to our source, so we can set properties.
        self.source = self.pipeline.get_by_name("source")
        # Query a pointer to the appsink, so we can assign the callback function.
        self.appsink = self.pipeline.get_by_name("sink")
        self.appsink.set_property("max-buffers", 5)
        self.appsink.set_property("drop", 1)
        self.appsink.set_property("emit-signals", 1)
        self.appsink.connect('new-sample', self.on_new_buffer)

    def on_new_buffer(self, appsink):
        """'new-sample' signal handler: stash the most recent sample.

        NOTE(review): reading the 'last-sample' property keeps only the single
        most recent frame; frames arriving while the consumer is busy are
        lost (there is no queue behind 'last-sample'). Pulling samples with
        emit('pull-sample') would drain the appsink queue instead -- confirm
        before relying on receiving every frame.
        """
        self.newsample = True
        if not self.samplelocked:
            try:
                self.sample = appsink.get_property('last-sample')
            except GLib.Error as error:
                # BUG FIX: `err` was undefined in the original handler.
                print("Error on_new_buffer pipeline: {0}".format(error))
                raise
        return False

    def Start_pipeline(self):
        """Set the pipeline to PLAYING and wait for the state change."""
        try:
            self.pipeline.set_state(Gst.State.PLAYING)
            self.pipeline.get_state(Gst.CLOCK_TIME_NONE)
        except GLib.Error as error:
            # BUG FIX: `err` was undefined in the original handler.
            print("Error starting pipeline: {0}".format(error))
            raise

    def Get_image(self):
        """Return the newest frame as a uint8 numpy array, or None.

        Shape is (height, width, bpp) with bpp 4 for BGRx and 1 for GRAY8.
        Returns None when no fresh sample has arrived since the last call.
        """
        # Sample code from https://gist.github.com/cbenhagen/76b24573fa63e7492fb6#file-gst-appsink-opencv-py-L34
        if self.sample is not None and self.newsample:
            self.samplelocked = True
            buf = self.sample.get_buffer()
            caps = self.sample.get_caps()
            structure = caps.get_structure(0)
            # Bytes per pixel: 1 for GRAY8, otherwise 4 (BGRx default).
            bpp = 1 if structure.get_value('format') == "GRAY8" else 4
            self.img_mat = numpy.ndarray(
                (structure.get_value('height'),
                 structure.get_value('width'),
                 bpp),
                buffer=buf.extract_dup(0, buf.get_size()),
                dtype=numpy.uint8)
            self.newsample = False
            self.samplelocked = False
            return self.img_mat
        return None

    def Stop_pipeline(self):
        """Step the pipeline down to NULL (stopped) state."""
        self.pipeline.set_state(Gst.State.PAUSED)
        self.pipeline.set_state(Gst.State.READY)
        self.pipeline.set_state(Gst.State.NULL)

    def List_Properties(self):
        """Print the names of all tcam properties the source exposes."""
        for name in self.source.get_tcam_property_names():
            print(name)

    def Get_Property(self, PropertyName):
        """Return the named tcam property as a CameraProperty record."""
        try:
            return CameraProperty(*self.source.get_tcam_property(PropertyName))
        except GLib.Error as error:
            print("Error get Property {0}: {1}", PropertyName, format(error))
            raise

    def Set_Property(self, PropertyName, value):
        """Set the named tcam property, wrapping value in a GObject.Value."""
        try:
            self.source.set_tcam_property(PropertyName, GObject.Value(type(value), value))
        except GLib.Error as error:
            print("Error set Property {0}: {1}", PropertyName, format(error))
            raise
# Open the camera (serial, width, height, fps numerator/denominator, mono).
Camera = TIS("20810358", 1920, 1080, 30, 1, False)
cv2.namedWindow('Camera', 1)
# Start the pipeline
Camera.Start_pipeline()
cv2.waitKey(1000)

error = 0
print('Press Esc to stop')
lastkey = 0
KEYS = {'esc': 27, '1': 49, '2': 50, '3': 51}

try:
    last_frame_time = None
    frame_threshold_time = 0.2  # seconds; gaps longer than this are reported as late frames
    inter_frame_time_average = 0
    frames = 0
    inter_frame_time_average_print_interval = 100  # measured in frames

    while lastkey != KEYS['esc']:
        # Grab an image
        image = Camera.Get_image()
        if image is not None:
            error = 0
            # show the image
            cv2.imshow('Camera', image)

            # --- Frame-timing instrumentation ---
            new_time = time()
            if last_frame_time is not None:
                inter_frame_time = new_time - last_frame_time
                # Report individual late frames
                if inter_frame_time > frame_threshold_time:
                    print(f"late frame: {inter_frame_time * 1000} ms")
                # Incremental running average of the inter-frame time
                frames += 1
                inter_frame_time_average = (inter_frame_time_average * (frames - 1)
                                            + inter_frame_time) / frames
                if frames % inter_frame_time_average_print_interval == 0:
                    print(f'--- FPS = {1 / inter_frame_time_average}')
            last_frame_time = new_time
        else:
            # BUG FIX: corrected the "reveived" typo in the original message.
            print("No image received")
            error += 1

        # wait. The wait period should be somewhat longer than 1000 / fps.
        lastkey = cv2.waitKey(67)
        if lastkey == KEYS['1']:
            print('auto=True')
            Camera.Set_Property("Exposure Auto", True)
            Camera.Set_Property("Gain Auto", True)
            Camera.Set_Property("Focus Auto", True)
        elif lastkey == KEYS['2']:
            print('auto=False')
            Camera.Set_Property("Exposure Auto", False)
            Camera.Set_Property("Gain Auto", False)
            Camera.Set_Property("Focus Auto", False)
        elif lastkey == KEYS['3']:
            print(f'focus: {Camera.Get_Property("Focus Auto")}')
            print(f'expos: {Camera.Get_Property("Exposure Auto")}')
            print(f' gain: {Camera.Get_Property("Gain Auto")}')
except KeyboardInterrupt:
    # BUG FIX: the window was created as 'Camera' but the original destroyed
    # 'Window', leaving the real window open after Ctrl-C.
    cv2.destroyWindow('Camera')

# Stop the pipeline and clean up
Camera.Stop_pipeline()
print('Program ended')
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
This is not how video capture should work. The "last-sample" property of a sink element gives you exactly that: The last sample seen by the element. There is no queue of "last-sample" buffers in the element so you will just miss images.
The "opencv.py" example shows how to use the appsink element correctly: https://github.com/TheImagingSource/tiscamera/blob/master/examples/python/opencv.py
I modified this example to print the achieved frame rate every 30 frames here:
https://gist.github.com/TIS-Arne/76452cb32b3284984282da596ce3602a