Gist by @techyian — created July 25, 2020 18:36

Save techyian/f0c815b1cb2789a2c8d1ec0f5d39fdbc to your computer and use it in GitHub Desktop.

Proposed changes to the Motion API.
// <copyright file="CircularBufferCaptureHandler.cs" company="Techyian">
// Copyright (c) Ian Auty. All rights reserved.
// Licensed under the MIT License. Please see LICENSE.txt for License info.
// </copyright>
using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using MMALSharp.Common;
using MMALSharp.Common.Utility;
using MMALSharp.Processors;
using MMALSharp.Processors.Motion;
namespace MMALSharp.Handlers
{
/// <summary>
/// Represents a capture handler working as a circular buffer.
/// </summary>
public sealed class CircularBufferCaptureHandler : VideoStreamCaptureHandler, IMotionCaptureHandler
{
    private bool _recordToFileStream;
    private bool _shouldDetectMotion;
    private bool _receivedIFrame;
    private Stopwatch _recordingElapsed;
    private IFrameAnalyser _analyser;
    private TaskCompletionSource<bool> _trigger;
    private TimeSpan _recordDuration;

    /// <summary>
    /// The circular buffer object responsible for storing image data.
    /// </summary>
    public CircularBuffer<byte> Buffer { get; private set; }

    /// <summary>
    /// Creates a new instance of the <see cref="CircularBufferCaptureHandler"/> class with the specified circular buffer capacity without file output.
    /// </summary>
    /// <param name="bufferSize">The buffer's size.</param>
    public CircularBufferCaptureHandler(int bufferSize)
        : base()
    {
        this.Buffer = new CircularBuffer<byte>(bufferSize);
        _trigger = CreateTrigger();
    }

    /// <summary>
    /// Creates a new instance of the <see cref="CircularBufferCaptureHandler"/> class with the specified circular buffer capacity and directory/extension of the working file.
    /// </summary>
    /// <param name="bufferSize">The buffer's size.</param>
    /// <param name="directory">The directory to save captured videos.</param>
    /// <param name="extension">The filename extension for saving files.</param>
    public CircularBufferCaptureHandler(int bufferSize, string directory, string extension)
        : base(directory, extension)
    {
        this.Buffer = new CircularBuffer<byte>(bufferSize);
        _trigger = CreateTrigger();
    }

    /// <summary>
    /// Creates a new instance of the <see cref="CircularBufferCaptureHandler"/> class with the specified circular buffer capacity and working file path.
    /// </summary>
    /// <param name="bufferSize">The buffer's size.</param>
    /// <param name="fullPath">The absolute full path to save captured data to.</param>
    public CircularBufferCaptureHandler(int bufferSize, string fullPath)
        : base(fullPath)
    {
        this.Buffer = new CircularBuffer<byte>(bufferSize);
        _trigger = CreateTrigger();
    }

    /// <inheritdoc />
    public override void Process(ImageContext context)
    {
        if (!_recordToFileStream)
        {
            // Not recording: retain the most recent data in the circular buffer so the
            // moments leading up to a recording can be prepended to the output file.
            for (var i = 0; i < context.Data.Length; i++)
            {
                this.Buffer.PushBack(context.Data[i]);
            }
        }
        else
        {
            if (context.Encoding == MMALEncoding.H264)
            {
                if (context.IFrame)
                {
                    _receivedIFrame = true;
                }

                if (_receivedIFrame)
                {
                    // We need to have received an IFrame for the recording to be valid.
                    this.WriteBufferedData();
                    this.CurrentStream.Write(context.Data, 0, context.Data.Length);
                    this.Processed += context.Data.Length;
                }
            }
            else
            {
                // Non-H.264 encodings have no IFrame requirement; write straight through.
                this.WriteBufferedData();
                this.CurrentStream.Write(context.Data, 0, context.Data.Length);
                this.Processed += context.Data.Length;
            }
        }

        // Motion analysis only runs while not recording.
        if (_shouldDetectMotion && !_recordToFileStream)
        {
            _analyser?.Apply(context);
        }

        this.CheckRecordingProgress();

        // Not calling base method to stop data being written to the stream when not recording.
        this.ImageContext = context;
    }

    /// <summary>
    /// Call to enable motion detection.
    /// </summary>
    /// <param name="config">The motion configuration.</param>
    /// <param name="onDetect">A callback for when motion is detected.</param>
    public void ConfigureMotionDetection(MotionConfig config, Func<Task> onDetect)
    {
        switch (this.MotionType)
        {
            case MotionType.FrameDiff:
                _analyser = new FrameDiffAnalyser(config, onDetect);
                break;
            case MotionType.MotionVector:
                // TODO Motion vector analyser
                break;
        }

        this.EnableMotionDetection();
    }

    /// <summary>
    /// Enables motion detection. When configured, this will instruct the capture handler to detect motion.
    /// </summary>
    public void EnableMotionDetection()
    {
        _shouldDetectMotion = true;

        MMALLog.Logger.LogInformation("Enabling motion detection.");
    }

    /// <summary>
    /// Disables motion detection. When configured, this will instruct the capture handler not to detect motion.
    /// </summary>
    public void DisableMotionDetection()
    {
        _shouldDetectMotion = false;
        (_analyser as FrameDiffAnalyser)?.ResetAnalyser();

        MMALLog.Logger.LogInformation("Disabling motion detection.");
    }

    /// <summary>
    /// Call to start recording to FileStream.
    /// </summary>
    /// <param name="recordDuration">The maximum amount of time to record for.</param>
    /// <param name="cancellationToken">A token which can be used to end the recording early.</param>
    /// <returns>A task that completes when the recording duration elapses or the token is cancelled.</returns>
    /// <exception cref="InvalidOperationException">Thrown when no output stream is configured.</exception>
    public async Task StartRecording(TimeSpan recordDuration, CancellationToken cancellationToken)
    {
        if (this.CurrentStream == null)
        {
            throw new InvalidOperationException($"Recording unavailable, {nameof(CircularBufferCaptureHandler)} was not created with output-file arguments.");
        }

        _recordToFileStream = true;
        _recordDuration = recordDuration;
        _recordingElapsed = new Stopwatch();
        _recordingElapsed.Start();

        await Task.WhenAny(_trigger.Task, cancellationToken.AsTask()).ConfigureAwait(false);
    }

    /// <summary>
    /// Call to stop recording to FileStream.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when no output stream is configured.</exception>
    public void StopRecording()
    {
        if (this.CurrentStream == null)
        {
            throw new InvalidOperationException($"Recording unavailable, {nameof(CircularBufferCaptureHandler)} was not created with output-file arguments.");
        }

        MMALLog.Logger.LogInformation("Stop recording.");

        _recordToFileStream = false;
        _receivedIFrame = false;
        _recordingElapsed?.Stop();
        _recordingElapsed = null;

        // A fresh trigger so a subsequent StartRecording call can be awaited again.
        _trigger = CreateTrigger();
    }

    /// <inheritdoc />
    public override void Dispose()
    {
        // Deliberately only disposes the output stream; the circular buffer is managed memory.
        this.CurrentStream?.Dispose();
    }

    /// <inheritdoc />
    public override string TotalProcessed()
    {
        return $"{this.Processed}";
    }

    // RunContinuationsAsynchronously prevents awaiters of StartRecording from resuming
    // inline on the thread that completes the trigger (the camera callback path).
    private static TaskCompletionSource<bool> CreateTrigger()
    {
        return new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
    }

    // Flushes any buffered pre-recording data to the output stream and resets the buffer.
    private void WriteBufferedData()
    {
        if (this.Buffer.Size > 0)
        {
            MMALLog.Logger.LogInformation($"Buffer contains data. Writing {this.Buffer.Size} bytes.");
            this.CurrentStream.Write(this.Buffer.ToArray(), 0, this.Buffer.Size);
            this.Processed += this.Buffer.Size;
            this.Buffer = new CircularBuffer<byte>(this.Buffer.Capacity);
        }
    }

    private void CheckRecordingProgress()
    {
        if (_recordingElapsed != null && _recordingElapsed.Elapsed >= _recordDuration)
        {
            MMALLog.Logger.LogInformation("Duration elapsed. Setting trigger.");

            // TrySetResult: the trigger must only complete once and SetResult throws on a
            // completed source. Continuations run asynchronously (see CreateTrigger), so the
            // previous Task.Run wrapper is no longer needed.
            _trigger.TrySetResult(true);
            this.StopRecording();
        }
    }
}
}
// <copyright file="FrameDiffAnalyser.cs" company="Techyian">
// Copyright (c) Ian Auty. All rights reserved.
// Licensed under the MIT License. Please see LICENSE.txt for License info.
// </copyright>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using MMALSharp.Common;
using MMALSharp.Common.Utility;
namespace MMALSharp.Processors.Motion
{
/// <summary>
/// The <see cref="FrameDiffAnalyser"/> is used to detect changes between two image frames.
/// </summary>
public class FrameDiffAnalyser : FrameAnalyser
{
    private Stopwatch _testFrameAge;

    // Invoked (fire-and-forget) when the frame difference exceeds the configured threshold.
    internal Func<Task> OnDetect { get; set; }

    /// <summary>
    /// Working storage for the Test Frame. This is the image we are comparing against new incoming frames.
    /// </summary>
    protected List<byte> TestFrame { get; set; }

    /// <summary>
    /// Indicates whether we have a full test frame.
    /// </summary>
    protected bool FullTestFrame { get; set; }

    /// <summary>
    /// The motion configuration object.
    /// </summary>
    protected MotionConfig MotionConfig { get; set; }

    /// <summary>
    /// The image metadata.
    /// </summary>
    protected ImageContext ImageContext { get; set; }

    /// <summary>
    /// Creates a new instance of <see cref="FrameDiffAnalyser"/>.
    /// </summary>
    /// <param name="config">The motion configuration object.</param>
    /// <param name="onDetect">A callback when changes are detected.</param>
    public FrameDiffAnalyser(MotionConfig config, Func<Task> onDetect)
    {
        this.TestFrame = new List<byte>();
        this.MotionConfig = config;
        this.OnDetect = onDetect;

        _testFrameAge = new Stopwatch();
    }

    /// <inheritdoc />
    public override void Apply(ImageContext context)
    {
        this.ImageContext = context;

        if (this.FullTestFrame)
        {
            MMALLog.Logger.LogDebug("Have full test frame.");

            // If we have a full test frame stored then we can start storing subsequent frame data to check.
            base.Apply(context);
        }
        else
        {
            this.TestFrame.AddRange(context.Data);

            if (context.Eos)
            {
                this.FullTestFrame = true;

                // Only track the test frame's age when periodic refresh is enabled.
                if (this.MotionConfig.TestFrameInterval != TimeSpan.Zero)
                {
                    _testFrameAge.Restart();
                }

                MMALLog.Logger.LogDebug("EOS reached for test frame.");
            }
        }

        if (this.FullFrame && !this.TestFrameExpired())
        {
            MMALLog.Logger.LogDebug("Have full frame, checking for changes.");

            this.CheckForChanges(this.OnDetect);
        }
    }

    /// <summary>
    /// Resets the test and working frames this analyser is using.
    /// </summary>
    public void ResetAnalyser()
    {
        this.TestFrame = new List<byte>();
        this.WorkingData = new List<byte>();
        this.FullFrame = false;
        this.FullTestFrame = false;

        _testFrameAge.Reset();
    }

    // Returns true when the stored test frame is older than the configured interval; in that
    // case the current working frame is promoted to be the new test frame.
    private bool TestFrameExpired()
    {
        if (this.MotionConfig.TestFrameInterval == TimeSpan.Zero || _testFrameAge.Elapsed < this.MotionConfig.TestFrameInterval)
        {
            return false;
        }

        MMALLog.Logger.LogDebug("Have full frame, updating test frame.");

        this.TestFrame = this.WorkingData;
        this.WorkingData = new List<byte>();
        _testFrameAge.Restart();

        return true;
    }

    // Prepares a thresholded difference image, computes the frame difference and fires the
    // onDetect callback when it meets or exceeds the configured threshold.
    private void CheckForChanges(Func<Task> onDetect)
    {
        this.PrepareDifferenceImage(this.ImageContext, this.MotionConfig.Threshold);

        var diff = this.Analyse();

        if (diff >= this.MotionConfig.Threshold)
        {
            MMALLog.Logger.LogInformation($"Motion detected! Frame difference {diff}.");

            // Fire-and-forget: the returned Task is deliberately not awaited so frame
            // processing is not blocked by the consumer's handler.
            onDetect();
        }
    }

    // For raw frames, builds an empty Bitmap of the configured resolution (pixel data is
    // copied in later via InitBitmapData); for encoded frames, decodes the stream directly.
    private Bitmap LoadBitmap(MemoryStream stream)
    {
        if (this.ImageContext.Raw)
        {
            PixelFormat format = default;

            // RGB16 doesn't appear to be supported by GDI?
            if (this.ImageContext.PixelFormat == MMALEncoding.RGB24)
            {
                format = PixelFormat.Format24bppRgb;
            }

            if (this.ImageContext.PixelFormat == MMALEncoding.RGB32)
            {
                format = PixelFormat.Format32bppRgb;
            }

            if (this.ImageContext.PixelFormat == MMALEncoding.RGBA)
            {
                format = PixelFormat.Format32bppArgb;
            }

            if (format == default)
            {
                // More specific than the bare Exception previously thrown here.
                throw new NotSupportedException("Unsupported pixel format for Bitmap");
            }

            return new Bitmap(this.ImageContext.Resolution.Width, this.ImageContext.Resolution.Height, format);
        }

        return new Bitmap(stream);
    }

    // Copies raw frame bytes into the locked bitmap's native buffer.
    private void InitBitmapData(BitmapData bmpData, byte[] data)
    {
        var pNative = bmpData.Scan0;
        Marshal.Copy(data, 0, pNative, data.Length);
    }

    // Compares the stored test frame against the current working frame, returning the
    // accumulated difference count across all four quadrants.
    private int Analyse()
    {
        using (var testMemStream = new MemoryStream(this.TestFrame.ToArray()))
        using (var currentMemStream = new MemoryStream(this.WorkingData.ToArray()))
        using (var testBmp = this.LoadBitmap(testMemStream))
        using (var currentBmp = this.LoadBitmap(currentMemStream))
        {
            var testBmpData = testBmp.LockBits(new Rectangle(0, 0, testBmp.Width, testBmp.Height), ImageLockMode.ReadWrite, testBmp.PixelFormat);
            var currentBmpData = currentBmp.LockBits(new Rectangle(0, 0, currentBmp.Width, currentBmp.Height), ImageLockMode.ReadWrite, currentBmp.PixelFormat);

            if (this.ImageContext.Raw)
            {
                // Raw frames are not decoded by Bitmap; copy the pixel data in manually.
                this.InitBitmapData(testBmpData, this.TestFrame.ToArray());
                this.InitBitmapData(currentBmpData, this.WorkingData.ToArray());
            }

            // Split image into 4 quadrants and compare each on its own task.
            var quadA = new Rectangle(0, 0, testBmpData.Width / 2, testBmpData.Height / 2);
            var quadB = new Rectangle(testBmpData.Width / 2, 0, testBmpData.Width / 2, testBmpData.Height / 2);
            var quadC = new Rectangle(0, testBmpData.Height / 2, testBmpData.Width / 2, testBmpData.Height / 2);
            var quadD = new Rectangle(testBmpData.Width / 2, testBmpData.Height / 2, testBmpData.Width / 2, testBmpData.Height / 2);

            var bpp = Image.GetPixelFormatSize(testBmp.PixelFormat) / 8;

            // Previously each task did "diff += ..." against a shared local, which is a data
            // race (+= is not atomic). Summing the task results makes the count reliable.
            var t1 = Task.Run(() => this.CheckDiff(quadA, testBmpData, currentBmpData, bpp, this.MotionConfig.Threshold));
            var t2 = Task.Run(() => this.CheckDiff(quadB, testBmpData, currentBmpData, bpp, this.MotionConfig.Threshold));
            var t3 = Task.Run(() => this.CheckDiff(quadC, testBmpData, currentBmpData, bpp, this.MotionConfig.Threshold));
            var t4 = Task.Run(() => this.CheckDiff(quadD, testBmpData, currentBmpData, bpp, this.MotionConfig.Threshold));

            Task.WaitAll(t1, t2, t3, t4);

            var diff = t1.Result + t2.Result + t3.Result + t4.Result;

            testBmp.UnlockBits(testBmpData);
            currentBmp.UnlockBits(currentBmpData);

            return diff;
        }
    }

    // Binarises the frame in-place (per quadrant, in parallel) and, for raw frames, writes
    // the thresholded bytes back to context.Data.
    private void PrepareDifferenceImage(ImageContext context, int threshold)
    {
        BitmapData bmpData = null;
        IntPtr pNative = IntPtr.Zero;
        int bytes;
        byte[] store = null;

        using (var ms = new MemoryStream(context.Data))
        using (var bmp = this.LoadBitmap(ms))
        {
            bmpData = bmp.LockBits(new Rectangle(0, 0,
                bmp.Width,
                bmp.Height),
                ImageLockMode.ReadWrite,
                bmp.PixelFormat);

            if (context.Raw)
            {
                this.InitBitmapData(bmpData, ms.ToArray());
            }

            pNative = bmpData.Scan0;

            // Split image into 4 quadrants and process individually.
            var quadA = new Rectangle(0, 0, bmpData.Width / 2, bmpData.Height / 2);
            var quadB = new Rectangle(bmpData.Width / 2, 0, bmpData.Width / 2, bmpData.Height / 2);
            var quadC = new Rectangle(0, bmpData.Height / 2, bmpData.Width / 2, bmpData.Height / 2);
            var quadD = new Rectangle(bmpData.Width / 2, bmpData.Height / 2, bmpData.Width / 2, bmpData.Height / 2);

            bytes = bmpData.Stride * bmp.Height;

            // Removed: a dead copy of the pixel data into a local array that was never read.

            var bpp = Image.GetPixelFormatSize(bmp.PixelFormat) / 8;

            var t1 = Task.Run(() => this.ApplyThreshold(quadA, bmpData, bpp, threshold));
            var t2 = Task.Run(() => this.ApplyThreshold(quadB, bmpData, bpp, threshold));
            var t3 = Task.Run(() => this.ApplyThreshold(quadC, bmpData, bpp, threshold));
            var t4 = Task.Run(() => this.ApplyThreshold(quadD, bmpData, bpp, threshold));

            Task.WaitAll(t1, t2, t3, t4);

            if (context.Raw)
            {
                store = new byte[bytes];
                Marshal.Copy(pNative, store, 0, bytes);
            }

            bmp.UnlockBits(bmpData);
        }

        // NOTE(review): for non-raw contexts this assigns null to context.Data (original
        // behaviour preserved) — confirm callers only use this path with raw frames.
        context.Data = store;
    }

    // Binarises one quadrant in-place: pixels whose summed first three channels exceed the
    // threshold become white, everything else black.
    private void ApplyThreshold(Rectangle quad, BitmapData bmpData, int pixelDepth, int threshold)
    {
        unsafe
        {
            var stride = bmpData.Stride;

            byte* ptr1 = (byte*)bmpData.Scan0;

            for (int column = quad.X; column < quad.X + quad.Width; column++)
            {
                for (int row = quad.Y; row < quad.Y + quad.Height; row++)
                {
                    var index = (column * pixelDepth) + (row * stride);

                    var rgb = ptr1[index] + ptr1[index + 1] + ptr1[index + 2];

                    var value = (byte)(rgb > threshold ? 255 : 0);
                    ptr1[index] = value;
                    ptr1[index + 1] = value;
                    ptr1[index + 2] = value;
                }
            }
        }
    }

    // Counts pixels in one quadrant whose brightness increased by more than the threshold
    // between the test frame and the current frame.
    private int CheckDiff(Rectangle quad, BitmapData bmpData, BitmapData bmpData2, int pixelDepth, int threshold)
    {
        unsafe
        {
            var stride1 = bmpData.Stride;
            var stride2 = bmpData2.Stride;

            byte* ptr1 = (byte*)bmpData.Scan0;
            byte* ptr2 = (byte*)bmpData2.Scan0;

            int diff = 0;

            for (int column = quad.X; column < quad.X + quad.Width; column++)
            {
                for (int row = quad.Y; row < quad.Y + quad.Height; row++)
                {
                    var index1 = (column * pixelDepth) + (row * stride1);
                    var index2 = (column * pixelDepth) + (row * stride2);

                    var rgb1 = ptr1[index1] + ptr1[index1 + 1] + ptr1[index1 + 2];
                    var rgb2 = ptr2[index2] + ptr2[index2 + 1] + ptr2[index2 + 2];

                    if (rgb2 - rgb1 > threshold)
                    {
                        diff++;
                    }

                    // If the threshold has been exceeded, we want to exit from this method immediately for performance reasons.
                    if (diff > threshold)
                    {
                        return diff;
                    }
                }
            }

            return diff;
        }
    }
}
}
// <copyright file="IMotionCaptureHandler.cs" company="Techyian">
// Copyright (c) Ian Auty. All rights reserved.
// Licensed under the MIT License. Please see LICENSE.txt for License info.
// </copyright>
using MMALSharp.Processors.Motion;
using System;
using System.Threading.Tasks;
namespace MMALSharp.Handlers
{
/// <summary>
/// Represents a capture handler which can detect motion.
/// </summary>
public interface IMotionCaptureHandler
{
    /// <summary>
    /// Gets or sets the motion type associated with this capture handler.
    /// </summary>
    MotionType MotionType { get; set; }

    /// <summary>
    /// Configures motion detection for this capture handler.
    /// </summary>
    /// <param name="config">The motion configuration to apply.</param>
    /// <param name="onDetect">A callback invoked when motion is detected.</param>
    void ConfigureMotionDetection(MotionConfig config, Func<Task> onDetect);

    /// <summary>
    /// Instructs this capture handler to begin detecting motion.
    /// </summary>
    void EnableMotionDetection();

    /// <summary>
    /// Instructs this capture handler to stop detecting motion.
    /// </summary>
    void DisableMotionDetection();
}
}
// <copyright file="MMALCamera.cs" company="Techyian">
// Copyright (c) Ian Auty. All rights reserved.
// Licensed under the MIT License. Please see LICENSE.txt for License info.
// </copyright>
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using MMALSharp.Common;
using MMALSharp.Common.Utility;
using MMALSharp.Components;
using MMALSharp.Config;
using MMALSharp.Handlers;
using MMALSharp.Native;
using MMALSharp.Ports;
using MMALSharp.Ports.Outputs;
using MMALSharp.Processors.Motion;
namespace MMALSharp
{
/// <summary>
/// This class provides an interface to the Raspberry Pi camera module.
/// </summary>
public sealed class MMALCamera
{
/// <summary>
/// Gets the singleton instance of the MMAL Camera. Call to initialise the camera for first use.
/// </summary>
public static MMALCamera Instance => Lazy.Value;

// Lazy<T> is thread-safe by default, so concurrent first access creates exactly one instance.
private static readonly Lazy<MMALCamera> Lazy = new Lazy<MMALCamera>(() => new MMALCamera());

/// <summary>
/// Reference to the camera component.
/// </summary>
public MMALCameraComponent Camera { get; }

// Private: instances are only created through the Lazy singleton above.
private MMALCamera()
{
    // Initialises the Broadcom host library before any MMAL component is created.
    BcmHost.bcm_host_init();

    this.Camera = new MMALCameraComponent();
}
/// <summary>
/// Begin capture on one of the camera's output ports.
/// </summary>
/// <param name="port">An output port of the camera component.</param>
public void StartCapture(IOutputPort port)
{
    // Capture can only be toggled on the camera's own still/video ports.
    var isCameraPort = port == this.Camera.StillPort || port == this.Camera.VideoPort;

    if (isCameraPort)
    {
        port.SetImageCapture(true);
    }
}
/// <summary>
/// Stop capture on one of the camera's output ports.
/// </summary>
/// <param name="port">An output port of the camera component.</param>
public void StopCapture(IOutputPort port)
{
    // Capture can only be toggled on the camera's own still/video ports.
    var isCameraPort = port == this.Camera.StillPort || port == this.Camera.VideoPort;

    if (isCameraPort)
    {
        port.SetImageCapture(false);
    }
}
/// <summary>
/// Force capture to stop on a port (Still or Video).
/// </summary>
/// <param name="port">The capture port.</param>
public void ForceStop(IOutputPort port)
{
    Task.Run(() =>
    {
        // TrySetResult: the port's trigger may already have been completed during normal
        // shutdown; SetResult would throw InvalidOperationException in that case.
        port.Trigger.TrySetResult(true);
    });
}
/// <summary>
/// Self-contained method for recording raw video frames directly from the camera's video port.
/// Uses the encoding and pixel format as set in <see cref="MMALCameraConfig.VideoEncoding"/> and <see cref="MMALCameraConfig.VideoSubformat"/>.
/// </summary>
/// <param name="handler">The video capture handler to apply to the encoder.</param>
/// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakeRawVideo(IVideoCaptureHandler handler, CancellationToken cancellationToken)
{
    using (var splitter = new MMALSplitterComponent())
    using (var renderer = new MMALVideoRenderer())
    {
        this.ConfigureCameraSettings();

        var splitterOutputConfig = new MMALPortConfig(MMALCameraConfig.Encoding, MMALCameraConfig.EncodingSubFormat);

        // Force port type to SplitterVideoPort to prevent resolution from being set against splitter component.
        splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterOutputConfig, handler);

        // Create our component pipeline: video port -> splitter (with handler),
        // preview port -> renderer.
        this.Camera.VideoPort.ConnectTo(splitter);
        this.Camera.PreviewPort.ConnectTo(renderer);

        MMALLog.Logger.LogInformation($"Preparing to take raw video. Resolution: {this.Camera.VideoPort.Resolution.Width} x {this.Camera.VideoPort.Resolution.Height}. " +
            $"Encoder: {MMALCameraConfig.Encoding.EncodingName}. Pixel Format: {MMALCameraConfig.EncodingSubFormat.EncodingName}.");

        // Camera warm up time (fixed 2 seconds before capture begins).
        await Task.Delay(2000).ConfigureAwait(false);

        await this.ProcessAsync(this.Camera.VideoPort, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Self-contained method for recording H.264 video for a specified amount of time. Records at 30fps, 25Mb/s at the highest quality.
/// </summary>
/// <param name="handler">The video capture handler to apply to the encoder.</param>
/// <param name="cancellationToken">A cancellationToken to signal when to stop video capture.</param>
/// <param name="split">Used for Segmented video mode.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakeVideo(IVideoCaptureHandler handler, CancellationToken cancellationToken, Split split = null)
{
    // Split (segmented) mode requires inline headers so each segment starts decodable;
    // silently degrade to a single file when they are disabled.
    if (split != null && !MMALCameraConfig.InlineHeaders)
    {
        MMALLog.Logger.LogWarning("Inline headers not enabled. Split mode not supported when this is disabled.");
        split = null;
    }

    using (var vidEncoder = new MMALVideoEncoder())
    using (var renderer = new MMALVideoRenderer())
    {
        this.ConfigureCameraSettings();

        // H.264/I420 at quality 10 and MaxBitrateLevel4 (the "25Mb/s" in the summary above).
        var portConfig = new MMALPortConfig(MMALEncoding.H264, MMALEncoding.I420, 10, MMALVideoEncoder.MaxBitrateLevel4, split: split);

        vidEncoder.ConfigureOutputPort(portConfig, handler);

        // Create our component pipeline.
        this.Camera.VideoPort.ConnectTo(vidEncoder);
        this.Camera.PreviewPort.ConnectTo(renderer);

        MMALLog.Logger.LogInformation($"Preparing to take video. Resolution: {this.Camera.VideoPort.Resolution.Width} x {this.Camera.VideoPort.Resolution.Height}. " +
            $"Encoder: {vidEncoder.Outputs[0].EncodingType.EncodingName}. Pixel Format: {vidEncoder.Outputs[0].PixelFormat.EncodingName}.");

        // Camera warm up time
        await Task.Delay(2000).ConfigureAwait(false);

        await this.ProcessAsync(this.Camera.VideoPort, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Self-contained method to capture raw image data directly from the Camera component - this method does not use an Image encoder.
/// Note: We cannot use the OPAQUE encoding format with this helper method, the capture will not fail, but will not produce valid data. For reference, RaspiStillYUV uses YUV420.
/// </summary>
/// <param name="handler">The image capture handler to use to save image.</param>
/// <returns>The awaitable Task.</returns>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="PiCameraError"/>
public async Task TakeRawPicture(IOutputCaptureHandler handler)
{
    // Raw capture reads straight off the still port, so nothing may be connected to it.
    if (this.Camera.StillPort.ConnectedReference != null)
    {
        throw new PiCameraError("A connection was found to the Camera still port. No encoder should be connected to the Camera's still port for raw capture.");
    }

    if (handler == null)
    {
        throw new ArgumentNullException(nameof(handler));
    }

    using (var renderer = new MMALNullSinkComponent())
    {
        // The handler is passed into camera initialisation here (rather than to an encoder)
        // because no encoder component exists in the raw pipeline.
        this.ConfigureCameraSettings(handler);

        this.Camera.PreviewPort.ConnectTo(renderer);

        MMALLog.Logger.LogInformation($"Preparing to take raw picture - Resolution: {this.Camera.StillPort.Resolution.Width} x {this.Camera.StillPort.Resolution.Height}. " +
            $"Encoder: {MMALCameraConfig.Encoding.EncodingName}. Pixel Format: {MMALCameraConfig.EncodingSubFormat.EncodingName}.");

        // Camera warm up time
        await Task.Delay(2000).ConfigureAwait(false);

        await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);
    }
}
/// <summary>
/// Self-contained method for capturing a single image from the camera still port.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <returns>The awaitable Task.</returns>
public async Task TakePicture(IOutputCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat)
{
    using (var imgEncoder = new MMALImageEncoder())
    using (var renderer = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings();

        // 90 is the encoder quality passed to the port configuration.
        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

        imgEncoder.ConfigureOutputPort(portConfig, handler);

        // Create our component pipeline.
        this.Camera.StillPort.ConnectTo(imgEncoder);
        this.Camera.PreviewPort.ConnectTo(renderer);

        MMALLog.Logger.LogInformation($"Preparing to take picture. Resolution: {this.Camera.StillPort.Resolution.Width} x {this.Camera.StillPort.Resolution.Height}. " +
            $"Encoder: {encodingType.EncodingName}. Pixel Format: {pixelFormat.EncodingName}.");

        // Camera warm up time
        await Task.Delay(2000).ConfigureAwait(false);

        await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);
    }
}
/// <summary>
/// Self-contained method for capturing continual images from the camera still port for a specified period of time.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <param name="cancellationToken">A cancellationToken to trigger stop capturing.</param>
/// <param name="burstMode">When enabled, burst mode will increase the rate at which images are taken, at the expense of quality.</param>
/// <returns>The awaitable Task.</returns>
public async Task TakePictureTimeout(IFileStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat, CancellationToken cancellationToken, bool burstMode = false)
{
    // NOTE(review): this sets global config but never restores it, so burst mode stays
    // enabled for subsequent captures — confirm whether this is intentional.
    if (burstMode)
    {
        MMALCameraConfig.StillBurstMode = true;
    }

    using (var imgEncoder = new MMALImageEncoder())
    using (var renderer = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

        imgEncoder.ConfigureOutputPort(portConfig, handler);

        // Create our component pipeline.
        this.Camera.StillPort.ConnectTo(imgEncoder);
        this.Camera.PreviewPort.ConnectTo(renderer);

        // Camera warm up time
        await Task.Delay(2000).ConfigureAwait(false);

        while (!cancellationToken.IsCancellationRequested)
        {
            await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);

            // Only roll to a new file if we are going to capture another image.
            if (!cancellationToken.IsCancellationRequested)
            {
                handler.NewFile();
            }
        }
    }
}
/// <summary>
/// Self-contained method for capturing timelapse images.
/// An MMALImageEncoder component will be created and attached to the still port.
/// </summary>
/// <param name="handler">The image capture handler to apply to the encoder component.</param>
/// <param name="encodingType">The image encoding type e.g. JPEG, BMP.</param>
/// <param name="pixelFormat">The pixel format to use with the encoder e.g. I420 (YUV420).</param>
/// <param name="timelapse">A Timelapse object which specifies the timeout and rate at which images should be taken.</param>
/// <returns>The awaitable Task.</returns>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="NotSupportedException">Thrown when the timelapse mode is not recognised.</exception>
public async Task TakePictureTimelapse(IFileStreamCaptureHandler handler, MMALEncoding encodingType, MMALEncoding pixelFormat, Timelapse timelapse)
{
    if (timelapse == null)
    {
        throw new ArgumentNullException(nameof(timelapse), "Timelapse object null. This must be initialized for Timelapse mode");
    }

    using (var imgEncoder = new MMALImageEncoder())
    using (var renderer = new MMALNullSinkComponent())
    {
        this.ConfigureCameraSettings();

        var portConfig = new MMALPortConfig(encodingType, pixelFormat, 90);

        imgEncoder.ConfigureOutputPort(portConfig, handler);

        // Create our component pipeline.
        this.Camera.StillPort.ConnectTo(imgEncoder);
        this.Camera.PreviewPort.ConnectTo(renderer);

        // Camera warm up time
        await Task.Delay(2000).ConfigureAwait(false);

        while (!timelapse.CancellationToken.IsCancellationRequested)
        {
            // Convert the configured mode/value into a delay in milliseconds.
            int interval;

            switch (timelapse.Mode)
            {
                case TimelapseMode.Millisecond:
                    interval = timelapse.Value;
                    break;
                case TimelapseMode.Second:
                    interval = timelapse.Value * 1000;
                    break;
                case TimelapseMode.Minute:
                    interval = (timelapse.Value * 60) * 1000;
                    break;
                default:
                    // Previously an unrecognised mode silently produced a 0ms interval,
                    // causing a tight capture loop. Fail loudly instead.
                    throw new NotSupportedException($"Unsupported timelapse mode: {timelapse.Mode}");
            }

            await Task.Delay(interval).ConfigureAwait(false);

            MMALLog.Logger.LogInformation($"Preparing to take picture. Resolution: {MMALCameraConfig.Resolution.Width} x {MMALCameraConfig.Resolution.Height}. " +
                $"Encoder: {encodingType.EncodingName}. Pixel Format: {pixelFormat.EncodingName}.");

            await this.ProcessAsync(this.Camera.StillPort).ConfigureAwait(false);

            // Only roll to a new file if another capture will follow.
            if (!timelapse.CancellationToken.IsCancellationRequested)
            {
                handler.NewFile();
            }
        }
    }
}
/// <summary>
/// Helper method to begin processing image data. Starts the Camera port and awaits until processing is complete.
/// Cleans up resources upon finish.
/// </summary>
/// <param name="cameraPort">The camera port which image data is coming from.</param>
/// <param name="cancellationToken">A CancellationToken to observe while waiting for a task to complete.</param>
/// <returns>The awaitable Task.</returns>
public async Task ProcessAsync(IOutputPort cameraPort, CancellationToken cancellationToken = default(CancellationToken))
{
    var handlerComponents = this.PopulateProcessingList();

    if (handlerComponents.Count == 0)
    {
        // No downstream components: process raw data straight off the camera port.
        // ConfigureAwait(false) added for consistency with every other await in this class.
        await this.ProcessRawAsync(cameraPort, cancellationToken).ConfigureAwait(false);
        return;
    }

    var tasks = new List<Task>();

    // Enable all connections associated with these components.
    foreach (var component in handlerComponents)
    {
        component.ForceStopProcessing = false;

        foreach (var port in component.ProcessingPorts.Values)
        {
            // Ports without an onward connection terminate the pipeline; their triggers
            // signal when processing has completed.
            if (port.ConnectedReference == null)
            {
                port.Start();
                tasks.Add(port.Trigger.Task);
            }
        }

        component.EnableConnections();
    }

    this.Camera.SetShutterSpeed(MMALCameraConfig.ShutterSpeed);

    // We now begin capturing on the camera, processing will commence based on the pipeline configured.
    this.StartCapture(cameraPort);

    if (cancellationToken == CancellationToken.None)
    {
        await Task.WhenAll(tasks).ConfigureAwait(false);
    }
    else
    {
        await Task.WhenAny(Task.WhenAll(tasks), cancellationToken.AsTask()).ConfigureAwait(false);

        // Either cancellation was requested or all tasks completed: instruct the components
        // to stop, then wait for the in-flight port tasks to drain.
        foreach (var component in handlerComponents)
        {
            component.ForceStopProcessing = true;
        }

        await Task.WhenAll(tasks).ConfigureAwait(false);
    }

    this.StopCapture(cameraPort);

    // Cleanup each connected downstream component.
    foreach (var component in handlerComponents)
    {
        foreach (var port in component.ProcessingPorts.Values)
        {
            if (port.ConnectedReference == null)
            {
                port.DisablePort();
            }
        }

        component.CleanPortPools();
        component.DisableConnections();
    }
}
/// <summary>
/// Prints the currently configured component pipeline to the console window.
/// </summary>
public void PrintPipeline()
{
    MMALLog.Logger.LogInformation("Current pipeline:");
    MMALLog.Logger.LogInformation(string.Empty);

    // The camera component first, then every registered downstream component.
    this.Camera.PrintComponent();

    foreach (var downstream in MMALBootstrapper.DownstreamComponents)
    {
        downstream.PrintComponent();
    }
}
/// <summary>
/// Turns off processing on the camera component.
/// </summary>
public void DisableCamera() => this.Camera.DisableComponent();
/// <summary>
/// Turns on processing on the camera component.
/// </summary>
public void EnableCamera() => this.Camera.EnableComponent();
/// <summary>
/// Prepares the camera component for operation. May be called again after changing
/// settings in <see cref="MMALCameraConfig"/> to re-apply the configuration.
/// </summary>
/// <param name="stillCaptureHandler">Optional output capture handler for use with raw image capture.</param>
/// <param name="videoCaptureHandler">Optional output capture handler for use with raw video capture.</param>
/// <returns>The camera instance, enabling fluent chaining.</returns>
public MMALCamera ConfigureCameraSettings(IOutputCaptureHandler stillCaptureHandler = null, IOutputCaptureHandler videoCaptureHandler = null)
{
    this.Camera.Initialise(stillCaptureHandler, videoCaptureHandler);

    return this;
}
/// <summary>
/// Turns on the annotation feature, producing a textual overlay on generated frames.
/// </summary>
/// <returns>The camera instance, enabling fluent chaining.</returns>
public MMALCamera EnableAnnotation()
{
    this.Camera.SetAnnotateSettings();

    return this;
}
/// <summary>
/// Turns off the annotation feature.
/// </summary>
/// <returns>The camera instance, enabling fluent chaining.</returns>
public MMALCamera DisableAnnotation()
{
    this.Camera.DisableAnnotate();

    return this;
}
/// <summary>
/// Builds an overlay renderer which can render a static image source on top of the display.
/// </summary>
/// <param name="parent">The parent renderer which is being used to overlay onto the display.</param>
/// <param name="config">The configuration for rendering a static preview overlay.</param>
/// <param name="source">A reference to the current stream being used in the overlay.</param>
/// <returns>The created <see cref="MMALOverlayRenderer"/> object.</returns>
public MMALOverlayRenderer AddOverlay(MMALVideoRenderer parent, PreviewOverlayConfiguration config, byte[] source)
{
    return new MMALOverlayRenderer(parent, config, source);
}
/// <summary>
/// Call to enable motion detection.
/// </summary>
/// <param name="handler">The motion capture handler.</param>
/// <param name="config">The motion configuration object.</param>
/// <param name="onDetect">The callback when motion is detected.</param>
/// <returns>The camera instance.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="handler"/> or <paramref name="config"/> is null.</exception>
public MMALCamera WithMotionDetection(IMotionCaptureHandler handler, MotionConfig config, Func<Task> onDetect)
{
    // Validate before mutating global state: the original would set InlineMotionVectors
    // and then throw a NullReferenceException on a null handler.
    if (handler == null)
    {
        throw new ArgumentNullException(nameof(handler));
    }

    if (config == null)
    {
        throw new ArgumentNullException(nameof(config));
    }

    // Enables inline motion vector output so the handler receives vector data to analyse
    // — inferred from the flag name; confirm against the encoder configuration.
    MMALCameraConfig.InlineMotionVectors = true;
    handler.ConfigureMotionDetection(config, onDetect);

    return this;
}
/// <summary>
/// Cleans up any unmanaged resources. Intended to be run when no more activity is to be
/// done on the camera.
/// </summary>
public void Cleanup()
{
    MMALLog.Logger.LogDebug("Destroying final components");

    // NOTE(review): snapshot taken before disposal — Dispose appears to mutate the shared
    // DownstreamComponents collection; confirm. Original behavior preserved.
    var snapshot = new List<MMALDownstreamComponent>(MMALBootstrapper.DownstreamComponents);

    foreach (var component in snapshot)
    {
        component.Dispose();
    }

    this.Camera.Dispose();

    BcmHost.bcm_host_deinit();
}
/// <summary>
/// Acts as an isolated processor specifically used when capturing raw frames from the camera
/// component, i.e. when no downstream components are connected. Capture runs until the
/// supplied token is cancelled, which completes the port's Trigger task.
/// </summary>
/// <param name="cameraPort">The camera component port (still or video).</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The awaitable task.</returns>
private async Task ProcessRawAsync(IOutputPort cameraPort,
CancellationToken cancellationToken = default(CancellationToken))
{
// Registration is disposed by the using block so the callback cannot fire after capture ends.
// NOTE(review): if the token is already cancelled, Register invokes this callback
// synchronously — before the port has been started; confirm that is safe here.
// NOTE(review): SetResult throws if the trigger has already completed (e.g. set elsewhere);
// TrySetResult would be the defensive choice — confirm against IOutputPort.Trigger usage.
using (cancellationToken.Register(() =>
{
// this callback will be executed when token is cancelled
cameraPort.Trigger.SetResult(true);
}))
{
// Reset the port before starting it, then begin capture and wait for the
// cancellation callback above to complete the trigger.
cameraPort.DisablePort();
cameraPort.Start();
this.StartCapture(cameraPort);
await cameraPort.Trigger.Task.ConfigureAwait(false);
this.StopCapture(cameraPort);
this.Camera.CleanPortPools();
}
}
/// <summary>
/// Helper method to check the Renderer component status. Logs a warning when the preview
/// port has no renderer connected, since this degrades the resulting image.
/// </summary>
private void CheckPreviewComponentStatus()
{
    // A non-null reference means a renderer is already connected; nothing to report.
    if (this.Camera.PreviewPort.ConnectedReference != null)
    {
        return;
    }

    MMALLog.Logger.LogWarning("Preview port does not have a Render component configured. Resulting image will be affected.");
}
/// <summary>
/// Collects every downstream handler component reachable from the camera's still, video
/// and preview ports (in that order) by walking each connected pipeline.
/// </summary>
/// <returns>The list of discovered downstream components.</returns>
private List<IDownstreamComponent> PopulateProcessingList()
{
    var found = new List<IDownstreamComponent>();

    // Traversal roots: whichever camera ports have a component connected.
    var roots = new[]
    {
        this.Camera.StillPort.ConnectedReference?.DownstreamComponent,
        this.Camera.VideoPort.ConnectedReference?.DownstreamComponent,
        this.Camera.PreviewPort.ConnectedReference?.DownstreamComponent,
    };

    foreach (var root in roots)
    {
        if (root != null)
        {
            this.FindComponents(root, found);
        }
    }

    return found;
}
/// <summary>
/// Recursively walks the pipeline starting at <paramref name="downstream"/>, adding handler
/// components to <paramref name="list"/>.
/// </summary>
/// <param name="downstream">The component to inspect.</param>
/// <param name="list">Accumulator for discovered components.</param>
private void FindComponents(IDownstreamComponent downstream, List<IDownstreamComponent> list)
{
    // Guard against re-visiting: a component reachable via more than one connected output
    // would otherwise be added twice, causing ProcessAsync to start its ports twice and
    // register duplicate trigger tasks. Contains uses reference equality here.
    if (list.Contains(downstream))
    {
        return;
    }

    if (downstream.Outputs.Count == 0)
    {
        return;
    }

    // A single unconnected output marks a terminal component in the pipeline.
    if (downstream.Outputs.Count == 1 && downstream.Outputs[0].ConnectedReference == null)
    {
        list.Add(downstream);
        return;
    }

    // NOTE(review): only DIRECT subclasses of MMALDownstreamHandlerComponent match this
    // BaseType comparison; deeper inheritance chains would be skipped. Confirm whether an
    // 'is' check was intended.
    if (downstream.GetType().BaseType == typeof(MMALDownstreamHandlerComponent))
    {
        list.Add((MMALDownstreamHandlerComponent)downstream);
    }

    // Recurse into every connected output.
    foreach (var output in downstream.Outputs)
    {
        if (output.ConnectedReference != null)
        {
            this.FindComponents(output.ConnectedReference.DownstreamComponent, list);
        }
    }
}
}
}
// <copyright file="MotionConfig.cs" company="Techyian">
// Copyright (c) Ian Auty. All rights reserved.
// Licensed under the MIT License. Please see LICENSE.txt for License info.
// </copyright>
using System;
namespace MMALSharp.Processors.Motion
{
/// <summary>
/// Holds the user's preferences for detecting motion between two image frames.
/// </summary>
public class MotionConfig
{
    /// <summary>
    /// The amount of change required before a motion event is raised.
    /// </summary>
    public int Threshold { get; set; }

    /// <summary>
    /// How often the test frame is refreshed. The test frame is the baseline image that
    /// each new frame is compared against when looking for motion.
    /// </summary>
    public TimeSpan TestFrameInterval { get; set; }

    /// <summary>
    /// Creates a new instance of <see cref="MotionConfig"/>.
    /// </summary>
    /// <param name="threshold">Motion sensitivity threshold. The default is 130 (suitable for many indoor scenes).</param>
    /// <param name="testFrameInterval">Frequency at which the test frame is updated. The default is 10 seconds.</param>
    public MotionConfig(int threshold = 130, TimeSpan testFrameInterval = default)
    {
        this.Threshold = threshold;

        // An unspecified (zero) interval falls back to the documented 10-second default.
        this.TestFrameInterval = testFrameInterval == TimeSpan.Zero
            ? TimeSpan.FromSeconds(10)
            : testFrameInterval;
    }
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment