standinga's GitHub gists
@standinga
standinga / sh
Created July 20, 2023 21:46
Stream the Mac camera using ffmpeg
# stream the FaceTime camera (device index 0) to a local RTMP endpoint
ffmpeg -f avfoundation -framerate 30 -i "0" -video_size 1024x720 -vcodec libx264 -preset ultrafast -f flv "rtmp://127.0.0.1/live"
# -framerate must be set before the "-i" (input) option
# list available devices:
ffmpeg -f avfoundation -list_devices true -i ""
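# note (assumption, not part of the gist): the rtmp:// URL expects an RTMP server
# (e.g. nginx with the rtmp module) to already be listening on 127.0.0.1; the
# stream can then be checked with: ffplay rtmp://127.0.0.1/live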
@standinga
standinga / UserDefaults+Swizzle.swift
Created August 18, 2022 15:25
Swizzle UserDefaults setObject:forKey: in Objective-C and Swift
/// ObjC
@implementation NSUserDefaults(Tracking)
+ (void)load {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        Class class = [self class];
        SEL originalSelector = @selector(setObject:forKey:);
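The Swift half of the gist is not visible in the preview. Below is a minimal sketch of the same swizzle in Swift, assuming the standard method_exchangeImplementations approach; the extension and the tracked_set(_:forKey:) replacement name are illustrative, not taken from the gist.

import Foundation
import ObjectiveC

extension UserDefaults {
    // run once at startup, e.g. by touching UserDefaults.swizzleSetObject
    static let swizzleSetObject: Void = {
        // setObject:forKey: is the ObjC selector behind set(_:forKey:) for Any? values
        let originalSelector = NSSelectorFromString("setObject:forKey:")
        let swizzledSelector = #selector(UserDefaults.tracked_set(_:forKey:))
        guard let originalMethod = class_getInstanceMethod(UserDefaults.self, originalSelector),
              let swizzledMethod = class_getInstanceMethod(UserDefaults.self, swizzledSelector) else { return }
        method_exchangeImplementations(originalMethod, swizzledMethod)
    }()

    @objc private func tracked_set(_ value: Any?, forKey key: String) {
        print("UserDefaults set \(String(describing: value)) forKey \(key)")
        // implementations were exchanged, so this calls the original set(_:forKey:)
        tracked_set(value, forKey: key)
    }
}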
@standinga
standinga / AudioReverse.swift
Created July 28, 2021 01:19
Reverse an audio file in Swift using the Accelerate framework (this is fast)
import Accelerate
import AVFoundation

func reverse(fromUrl: URL) -> URL? {
    do {
        let input = try AVAudioFile(forReading: fromUrl)
        let format = input.processingFormat
        let frameCount = AVAudioFrameCount(input.length)
        let outSettings = [AVNumberOfChannelsKey: format.channelCount,
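The preview stops inside the output-settings dictionary. Below is a minimal sketch of the core reversal step, assuming the gist builds on Accelerate's vDSP_vrvrs as the description suggests; reversedBuffer(fromUrl:) is an illustrative name, not the gist's.

import Accelerate
import AVFoundation

func reversedBuffer(fromUrl: URL) throws -> AVAudioPCMBuffer? {
    let input = try AVAudioFile(forReading: fromUrl)
    let frameCount = AVAudioFrameCount(input.length)
    guard let buffer = AVAudioPCMBuffer(pcmFormat: input.processingFormat,
                                        frameCapacity: frameCount) else { return nil }
    try input.read(into: buffer)
    guard let channels = buffer.floatChannelData else { return nil }
    // reverse each channel in place; vDSP_vrvrs works on contiguous float samples
    for channel in 0..<Int(buffer.format.channelCount) {
        vDSP_vrvrs(channels[channel], 1, vDSP_Length(buffer.frameLength))
    }
    return buffer
}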
@standinga
standinga / H264Coder.swift
Created December 15, 2020 11:15
updated H264Coder
import AVFoundation
import VideoToolbox

class H264Coder {
    var session: VTCompressionSession?
    var onFrame: ((CMSampleBuffer) -> Void)?
    let outputCallback: VTCompressionOutputCallback = { refcon, sourceFrameRefCon, status, infoFlags, sampleBuffer in
        guard let refcon = refcon,
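              // (the preview cuts off here; a sketch of how such a callback
              // typically continues, not necessarily the gist's exact code)
              let sampleBuffer = sampleBuffer,
              status == noErr else { return }
        let coder = Unmanaged<H264Coder>.fromOpaque(refcon).takeUnretainedValue()
        coder.onFrame?(sampleBuffer)
    }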
@standinga
standinga / ViewController.swift
Created December 15, 2020 11:14
ViewController.swift updated
import AVFoundation
import UIKit

class AVSampleBufferView: UIView {
    var bufferLayer: AVSampleBufferDisplayLayer {
        return layer as! AVSampleBufferDisplayLayer
    }
    override static var layerClass: AnyClass {
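        // (truncated in the preview; the force-cast in bufferLayer implies
        // the override returns the display layer class)
        return AVSampleBufferDisplayLayer.self
    }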
@standinga
standinga / H264Coder.swift
Last active December 15, 2020 00:07
initializer
init(width: Int32, height: Int32) {
    let status = VTCompressionSessionCreate(allocator: kCFAllocatorDefault,
                                            width: width,
                                            height: height,
                                            codecType: kCMVideoCodecType_H264,
                                            encoderSpecification: nil,
                                            imageBufferAttributes: nil,
                                            compressedDataAllocator: nil,
                                            outputCallback: outputCallback,
                                            refcon: Unmanaged.passUnretained(self).toOpaque(),
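                                            // (the preview cuts off; per the VTCompressionSessionCreate
                                            // signature the one remaining argument is the session out-parameter)
                                            compressionSessionOut: &session)
    // assumed follow-up, not visible in the gist preview:
    guard status == noErr, let session = session else { return }
    VTSessionSetProperty(session, key: kVTCompressionPropertyKey_RealTime, value: kCFBooleanTrue)
    VTCompressionSessionPrepareToEncodeFrames(session)
}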
@standinga
standinga / H264Coder.swift
Created December 15, 2020 00:00
H264Coder.swift initial
import AVFoundation
import VideoToolbox

class H264Coder {
    var session: VTCompressionSession?
    let outputCallback: VTCompressionOutputCallback = { refcon, sourceFrameRefCon, status, infoFlags, sampleBuffer in
        guard let refcon = refcon,
              status == noErr,
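The rest of the class is not shown in the preview. Below is a sketch of the method that would feed capture frames into the compression session; encode(_:) is an assumed name, while the VideoToolbox call is the standard VTCompressionSessionEncodeFrame API.

    func encode(_ sampleBuffer: CMSampleBuffer) {
        guard let session = session,
              let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        VTCompressionSessionEncodeFrame(session,
                                        imageBuffer: imageBuffer,
                                        presentationTimeStamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
                                        duration: CMSampleBufferGetDuration(sampleBuffer),
                                        frameProperties: nil,
                                        sourceFrameRefcon: nil,
                                        infoFlagsOut: nil)
    }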
@standinga
standinga / ViewController.swift
Created December 14, 2020 23:41
Updated ViewController
import AVFoundation
import UIKit

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    private let avManager = AVManager()
    private let cameraView = AVSampleBufferView()
    private let encodedView = AVSampleBufferView()
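The AVCaptureVideoDataOutputSampleBufferDelegate conformance suggests the controller receives raw camera frames directly. Below is a sketch of that delegate method, assuming avManager routes its video output here and the frame is both previewed and passed to an encoder; the coder property is an assumption, not shown in the gist.

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        // show the raw camera frame
        cameraView.bufferLayer.enqueue(sampleBuffer)
        // hand the frame to the encoder; `coder` is an assumed H264Coder property
        // coder.encode(sampleBuffer)
    }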
@standinga
standinga / ViewController.swift
Created December 14, 2020 23:36
ViewController with 3 AVSampleBufferView
import AVFoundation
import UIKit

class ViewController: UIViewController {
    private let cameraView = AVSampleBufferView()
    private let encodedView = AVSampleBufferView()
    private let decodedView = AVSampleBufferView()
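The preview ends before any layout code. One minimal way to put the three views on screen (a sketch, not the gist's actual layout):

    override func viewDidLoad() {
        super.viewDidLoad()
        let stack = UIStackView(arrangedSubviews: [cameraView, encodedView, decodedView])
        stack.axis = .vertical
        stack.distribution = .fillEqually
        stack.frame = view.bounds
        stack.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        view.addSubview(stack)
    }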
@standinga
standinga / AVSampleBufferView.swift
Created December 14, 2020 23:32
view displaying CMSampleBuffer frames
import AVFoundation
import UIKit

class AVSampleBufferView: UIView {
    var bufferLayer: AVSampleBufferDisplayLayer {
        return layer as! AVSampleBufferDisplayLayer
    }
    override static var layerClass: AnyClass {
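        // (truncated, as in the copy of this view shown earlier on the page;
        // the override returns the display layer class)
        return AVSampleBufferDisplayLayer.self
    }
}

// usage sketch (not part of the gist): frames from a capture or decode
// callback are displayed by enqueueing them on the layer:
// someSampleBufferView.bufferLayer.enqueue(sampleBuffer)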