@mekya
Created February 16, 2023 14:37
Open Camera and use External Video Frame for iOS
//
// VideoViewController.swift
// AntMediaReferenceApplication
//
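// Opens the device camera with AVFoundation and delivers the captured frames
// to the Ant Media WebRTC client as an external video source.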
//
import UIKit
import WebRTC
import AVFoundation
import WebRTCiOSSDK

class VideoViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    @IBOutlet weak var pipVideoView: UIView!
    @IBOutlet weak var fullVideoView: UIView!
    @IBOutlet weak var containerView: UIView!
    @IBOutlet weak var modeLabel: UILabel!

    // Auto Layout constraints used for animations
    @IBOutlet weak var containerLeftConstraint: NSLayoutConstraint?

    let client: AntMediaClient = AntMediaClient()

    var clientUrl: String!
    var clientStreamId: String!
    var clientToken: String!
    var clientMode: AntMediaClientMode!

    var tapGesture: UITapGestureRecognizer!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.setGesture()
    }
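
    // useExternalCameraSource and setExternalVideoCapture tell the SDK not to
    // open the camera itself; instead, this controller captures frames with
    // AVFoundation and pushes them in through deliverExternalVideo(...).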
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        _setupCaptureSession()

        self.client.delegate = self
        self.client.setDebug(true)
        self.client.setOptions(url: self.clientUrl, streamId: self.clientStreamId, token: self.clientToken, mode: self.clientMode, enableDataChannel: true, useExternalCameraSource: true)
        self.client.setExternalVideoCapture(externalVideoCapture: true)
        self.client.setTargetResolution(width: 1280, height: 720)
        self.client.setTargetFps(fps: 30)
        self.client.setExternalAudio(externalAudioEnabled: false)
        self.client.initPeerConnection()
        self.client.start()
    }
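
    // A UIView subclass backed by an AVCaptureVideoPreviewLayer, so the camera
    // preview automatically resizes with the view.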
    private class _PreviewView: UIView {
        override class var layerClass: AnyClass {
            return AVCaptureVideoPreviewLayer.self
        }

        var videoPreviewLayer: AVCaptureVideoPreviewLayer {
            return layer as! AVCaptureVideoPreviewLayer
        }
    }
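
    // Builds the AVFoundation pipeline: a 720p session with the camera as input,
    // an AVCaptureVideoDataOutput that delivers frames on a private dispatch
    // queue, and an on-screen preview layer.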
    private func _setupCaptureSession() {
        let session = AVCaptureSession()
        session.sessionPreset = .hd1280x720

        guard
            let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .unspecified),
            let input = try? AVCaptureDeviceInput(device: device),
            session.canAddInput(input) else { return }

        session.beginConfiguration()
        session.addInput(input)
        session.commitConfiguration()

        let output = AVCaptureVideoDataOutput()
        guard session.canAddOutput(output) else { return }
        output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "io.antmedia.external.video.dispatch"))

        session.beginConfiguration()
        session.addOutput(output)
        session.commitConfiguration()

        DispatchQueue.main.async {
            let previewView = _PreviewView()
            previewView.videoPreviewLayer.session = session
            previewView.frame = self.fullVideoView.bounds
            previewView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
            self.fullVideoView.addSubview(previewView)
        }
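
        // Note: startRunning() blocks the calling thread while the session
        // starts, so Apple recommends dispatching it to a background queue;
        // here it runs on the main thread via viewWillAppear.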
        session.startRunning()
    }

    /*
     * Mirrors the given view. Either fullVideoView or pipVideoView can be
     * passed as the parameter.
     */
    private func mirrorView(view: UIView) {
        view.transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
    }

    /*
     * The WebRTC framework asks for microphone permission by default, even when
     * it is only playing a stream. If you call this method before starting the
     * WebRTC client in play mode, it will not ask for mic permission.
     *
     * ATTENTION: Calling this method while sending a stream causes the audio not
     * to be sent. So if you publish and play a stream at the same time, don't
     * use this method.
     */
    private func dontAskMicPermissionForPlaying() {
        let webRTCConfiguration = RTCAudioSessionConfiguration.init()
        webRTCConfiguration.mode = AVAudioSession.Mode.moviePlayback.rawValue
        webRTCConfiguration.category = AVAudioSession.Category.playback.rawValue
        webRTCConfiguration.categoryOptions = AVAudioSession.CategoryOptions.duckOthers
        RTCAudioSessionConfiguration.setWebRTC(webRTCConfiguration)
    }
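
    // A minimal usage sketch for a play-only screen (assumed call order; not
    // exercised in this file):
    //
    //   dontAskMicPermissionForPlaying()   // before initPeerConnection()/start()
    //   client.initPeerConnection()
    //   client.start()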

    private func resetDefaultWebRTCAudioConfiguration() {
        RTCAudioSessionConfiguration.setWebRTC(RTCAudioSessionConfiguration.init())
    }

    @IBAction func audioTapped(_ sender: UIButton!) {
        sender.isSelected.toggle()
        self.client.setMicMute(mute: sender.isSelected, completionHandler: { (mute, error) in
            if error == nil {
                AntMediaClient.printf("Microphone is set to " + (mute ? "muted" : "unmuted"))
            } else {
                AntMediaClient.printf("Failed to set microphone status to " + (mute ? "muted" : "unmuted"))
            }
        })
    }

    @IBAction func videoTapped(_ video: UIButton!) {
        video.isSelected.toggle()
        //self.client.toggleVideo()
        self.client.switchCamera()
    }

    @IBAction func closeTapped(_ sender: UIButton!) {
        self.client.stop()
        self.dismiss(animated: true, completion: nil)
    }

    @IBAction func messageButtonTapped(_ sender: Any) {
        //show alert window if data channel is enabled
        if self.client.isDataChannelActive() {
            let alert = UIAlertController(title: "Send Message", message: "Send message with WebRTC Datachannel", preferredStyle: .alert)
            alert.addTextField { (textField) in
                textField.text = ""
            }
            alert.addAction(UIAlertAction(title: "Send", style: .default, handler: { [weak alert] (_) in
                guard let textValue = alert?.textFields?.first?.text else {
                    return
                }
                if let data = textValue.data(using: .utf8) {
                    /*
                     Send data through the data channel
                     */
                    self.client.sendData(data: data, binary: false)
                    /*
                     Alternatively, you can use some simple JSON formatting to send
                     better structured messages:

                     let candidateJson = ["command": "message",
                                          "content": textValue,
                     ] as [String: Any]
                     self.client.sendData(data: candidateJson.json.data(using: .utf8) ?? Data.init(capacity: 1), binary: false)
                     */
                }
            }))
            alert.addAction(UIAlertAction(title: "Cancel", style: .default, handler: { [weak alert] (_) in
                alert?.dismiss(animated: false, completion: nil)
            }))
            self.present(alert, animated: true, completion: nil)
        } else {
            AlertHelper.getInstance().show("Warning", message: "Data channel is not active. Please make sure the data channel is enabled on both the server side and in the mobile SDK.")
        }
    }

    private func setGesture() {
        self.tapGesture = UITapGestureRecognizer(target: self, action: #selector(VideoViewController.toggleContainer))
        self.tapGesture.numberOfTapsRequired = 1
        self.fullVideoView.addGestureRecognizer(tapGesture)
        // A gesture recognizer can be attached to only one view at a time, so
        // the root view needs its own recognizer.
        let viewTapGesture = UITapGestureRecognizer(target: self, action: #selector(VideoViewController.toggleContainer))
        viewTapGesture.numberOfTapsRequired = 1
        self.view.addGestureRecognizer(viewTapGesture)
    }
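
    // Slides the control container on or off screen by animating its left
    // constraint: -45 hides it, 15 shows it.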
    @objc private func toggleContainer() {
        UIView.animate(withDuration: 0.3, animations: { () -> Void in
            guard let constraint = self.containerLeftConstraint else { return }
            if constraint.constant <= -45.0 {
                constraint.constant = 15.0
                self.containerView.alpha = 1.0
            } else {
                constraint.constant = -45.0
                self.containerView.alpha = 0.0
            }
            self.view.layoutIfNeeded()
        })
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.client.stop()
    }
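
    // AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on the
    // dispatch queue configured in _setupCaptureSession for every captured
    // frame. The frame is forwarded to the SDK with a rotation derived from
    // the current device orientation.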
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let orientation = UIDevice.current.orientation
        var rotation = 0
        switch orientation {
        case .portrait:
            rotation = 90
        case .portraitUpsideDown:
            rotation = 270
        case .landscapeLeft:
            rotation = 0
        case .landscapeRight:
            rotation = 180
        default:
            break
        }
        AntMediaClient.printf("Receiving capture output. Orientation: \(orientation) rotation: \(rotation)")
        self.client.deliverExternalVideo(sampleBuffer: sampleBuffer, rotation: rotation)
    }
}
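
// AntMediaClientDelegate callbacks: connection lifecycle, remote/local stream
// events, and data channel messages coming from the SDK.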
extension VideoViewController: AntMediaClientDelegate {

    func clientDidConnect(_ client: AntMediaClient) {
        print("VideoViewController: Connected")
    }

    func clientDidDisconnect(_ message: String) {
        print("VideoViewController: Disconnected: \(message)")
    }

    func clientHasError(_ message: String) {
        AlertHelper.getInstance().show("Error!", message: message, cancelButtonText: "OK", cancelAction: {
            self.dismiss(animated: true, completion: nil)
        })
    }

    func disconnected(streamId: String) {
        print("Disconnected -> \(streamId)")
    }

    func remoteStreamStarted(streamId: String) {
        print("Remote stream started -> \(streamId)")
    }

    func remoteStreamRemoved(streamId: String) {
        print("Remote stream removed -> \(streamId)")
        if self.client.getCurrentMode() == .join {
            Run.afterDelay(1, block: {
                self.fullVideoView.isHidden = true
            })
        } else {
            AlertHelper.getInstance().show("Caution!", message: "Remote stream is no longer available", cancelButtonText: "OK", cancelAction: {
                //self.dismiss(animated: true, completion: nil)
            })
        }
    }

    func localStreamStarted(streamId: String) {
        print("Local stream added")
        self.fullVideoView.isHidden = false
    }

    func playStarted(streamId: String) {
        print("play started")
    }

    func playFinished(streamId: String) {
        print("play finished")
        AlertHelper.getInstance().show("Caution!", message: "Remote stream is no longer available", cancelButtonText: "OK", cancelAction: {
            self.dismiss(animated: true, completion: nil)
        })
    }

    func publishStarted(streamId: String) {
        Run.onMainThread {
            Run.afterDelay(3, block: {
                Run.onMainThread {
                    self.pipVideoView.bringSubviewToFront(self.fullVideoView)
                }
            })
        }
    }

    func publishFinished(streamId: String) {
    }

    func audioSessionDidStartPlayOrRecord(streamId: String) {
        AntMediaClient.speakerOn()
    }

    func dataReceivedFromDataChannel(streamId: String, data: Data, binary: Bool) {
        AntMediaClient.printf("Data is received from stream: \(streamId) is binary: \(binary) content: " + String(decoding: data, as: UTF8.self))
        Run.onMainThread {
            self.showToast(controller: self, message: String(decoding: data, as: UTF8.self), seconds: 1.0)
        }
    }

    func streamInformation(streamInfo: [StreamInformation]) {
        AntMediaClient.printf("Incoming stream info")
        for result in streamInfo {
            AntMediaClient.printf("resolution width: \(result.streamWidth) height: \(result.streamHeight) video " + "bitrate: \(result.videoBitrate) audio bitrate: \(result.audioBitrate) codec: \(result.videoCodec)")
        }
    }

    func showToast(controller: UIViewController, message: String, seconds: Double) {
        let alert = UIAlertController(title: "Received Message", message: message, preferredStyle: .alert)
        alert.view.backgroundColor = UIColor.black
        alert.view.alpha = 0.6
        alert.view.layer.cornerRadius = 15
        controller.present(alert, animated: true)
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + seconds) {
            alert.dismiss(animated: true)
        }
    }

    func eventHappened(streamId: String, eventType: String) {
    }
}
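
// A minimal presentation sketch, assuming the controller lives in a storyboard.
// The storyboard identifier, the server URL format, and the .publish mode case
// are illustrative assumptions, not confirmed by this file:
//
//   let vc = storyboard.instantiateViewController(withIdentifier: "VideoViewController") as! VideoViewController
//   vc.clientUrl = "wss://your-ant-media-server:5443/WebRTCAppEE/websocket"  // assumed URL format
//   vc.clientStreamId = "stream1"
//   vc.clientMode = .publish  // assumed mode case; .join appears elsewhere in this file
//   present(vc, animated: true)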