import ReplayKit
import Speech
import UserNotifications
import AVFoundation
import UIKit

class SampleHandler: RPBroadcastSampleHandler {
    var running = true

    // Writer objects that capture the broadcast video into the shared app group container.
    var videoWriter: AVAssetWriter?
    var writerInput: AVAssetWriterInput!

    // Speech recognition objects used to caption the app audio; see the
    // initialization sketch in broadcastStarted below.
    var speechRecognizer: SFSpeechRecognizer?
    var request: SFSpeechAudioBufferRecognitionRequest?
    var task: SFSpeechRecognitionTask?

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        // The user has requested to start the broadcast. Setup info from the UI extension
        // can be supplied, but it is optional.
        do {
            let outputURL = FileManager.default
                .containerURL(forSecurityApplicationGroupIdentifier: "group.com.thatmarcel.captions.shared")!
                .appendingPathComponent("movie.mov")
            videoWriter = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
        } catch {
            print(error)
        }
        assert(videoWriter != nil, "Invalid parameter not satisfying: videoWriter != nil")

        let videoSettings = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: UIScreen.main.bounds.size.width,
            AVVideoHeightKey: UIScreen.main.bounds.size.height
        ] as [String: Any]

        writerInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
        writerInput.expectsMediaDataInRealTime = true

        assert(videoWriter?.canAdd(writerInput) ?? false, "Invalid parameter not satisfying: videoWriter?.canAdd(writerInput) ?? false")
        videoWriter?.add(writerInput)

        // Stop capturing and finish the file ten seconds after the broadcast starts.
        DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
            self.save()
        }

        // Initialize speech recognition
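        // Sketch, not part of the original gist: one possible way to create the
        // speechRecognizer / request / task properties that processSampleBuffer expects.
        // Assumes speech recognition authorization was already granted in the host app;
        // the en-US locale is an arbitrary choice.
        speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))
        let recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        recognitionRequest.shouldReportPartialResults = true
        request = recognitionRequest
        task = speechRecognizer?.recognitionTask(with: recognitionRequest) { result, _ in
            if let transcription = result?.bestTranscription.formattedString {
                // Partial transcriptions arrive here; they could be written to the
                // shared app group container for the host app to display as captions.
                print(transcription)
            }
        }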
    }

    func save() {
        running = false
        writerInput.markAsFinished()
        videoWriter?.finishWriting {
            // Writing has finished; movie.mov now sits in the shared app group container.
        }
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        switch sampleBufferType {
        case .video:
            // The first video buffer starts the writer and its session at that
            // buffer's presentation timestamp; later buffers are appended.
            if videoWriter?.status == .unknown {
                videoWriter?.startWriting()
                videoWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                return
            }
            if running, writerInput.isReadyForMoreMediaData {
                writerInput.append(sampleBuffer)
            }
        case .audioApp:
            // Handle audio sample buffer for app audio: feed it into the speech
            // recognition request so the app audio gets transcribed.
            if speechRecognizer != nil, let request = request, task != nil {
                request.appendAudioSampleBuffer(sampleBuffer) // request is the SFSpeechAudioBufferRecognitionRequest
            }
        case .audioMic:
            // Handle audio sample buffer for mic audio
            break
        @unknown default:
            print("unknown")
        }
    }
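
    // Sketch, not part of the original gist: RPBroadcastSampleHandler also calls
    // broadcastFinished() when the user ends the broadcast, so the writer could be
    // finalized there in addition to the 10-second timer above.
    override func broadcastFinished() {
        if running {
            save()
        }
    }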
}
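
// Sketch, not part of the original gist: a hypothetical helper the host app could
// use to locate the finished recording. The app group identifier and file name
// match the ones used in broadcastStarted above.
func sharedRecordingURL() -> URL? {
    FileManager.default
        .containerURL(forSecurityApplicationGroupIdentifier: "group.com.thatmarcel.captions.shared")?
        .appendingPathComponent("movie.mov")
}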