// Fans a tracking event out to every registered tracker.
// TrackerProtocol and TrackingInfo are assumed to be defined elsewhere.
class Tracker {
    var trackers: [TrackerProtocol] = []

    func track(info: TrackingInfo?) {
        guard let info = info else {
            assertionFailure("TrackingInfo is nil")
            return
        }
        trackers.forEach { tracker in
            tracker.track(info: info)
        }
    }
}
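The class above compiles only against types the preview does not show. A minimal sketch of hypothetical definitions for TrackerProtocol and TrackingInfo (the names come from the gist, but these bodies are assumed), plus a usage example:

// Hypothetical stand-ins so the Tracker class above compiles.
struct TrackingInfo {
    let eventName: String        // assumed field; the real type is not shown in the gist
}

protocol TrackerProtocol {
    func track(info: TrackingInfo)
}

// Example conformer that just logs the event.
struct ConsoleTracker: TrackerProtocol {
    func track(info: TrackingInfo) {
        print("tracked:", info.eventName)
    }
}

let tracker = Tracker()
tracker.trackers = [ConsoleTracker()]
tracker.track(info: TrackingInfo(eventName: "app_open"))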
import UIKit
import AVFoundation

// Build an AVAudioEngine graph: one player node feeding two mixer nodes.
// Nodes must be attached to the engine before they can be connected.
let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
let mixer1 = AVAudioMixerNode()
let mixer2 = AVAudioMixerNode()
engine.attach(player)
engine.attach(mixer1)
engine.attach(mixer2)
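The preview above stops after the attach calls. One plausible way to wire and start this graph (the routing below is an assumption, not part of the gist):

// Assumed routing: player -> mixer1 -> mixer2 -> main mixer -> hardware output.
let format = player.outputFormat(forBus: 0)
engine.connect(player, to: mixer1, format: format)
engine.connect(mixer1, to: mixer2, format: format)
engine.connect(mixer2, to: engine.mainMixerNode, format: format)

do {
    try engine.start()   // the main mixer is implicitly connected to the output node
    player.play()
} catch {
    print("Failed to start engine:", error)
}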
import UIKit
import AVFoundation

// Builds a CMSampleBuffer from raw PCM samples plus a timestamp and stream description.
// (The gist preview is truncated; only the format-description step is shown.)
public func createSampleBufferBy<T>(pcm: [T], timeStamp: AudioTimeStamp, bd: AudioStreamBasicDescription) -> CMSampleBuffer? {
    var basicDescription = bd
    var formatDescription: CMAudioFormatDescription?
    _ = CMAudioFormatDescriptionCreate(allocator: kCFAllocatorDefault,
                                       asbd: &basicDescription,
                                       layoutSize: 0,
                                       layout: nil,
                                       magicCookieSize: 0,
                                       magicCookie: nil,
                                       extensions: nil,
                                       formatDescriptionOut: &formatDescription)
    // rest of the function is cut off in the gist preview
import UIKit
import AVFoundation

// Minimal playback graph: attach a player node, connect it to the main mixer, start the engine.
let player = AVAudioPlayerNode()
let engine = AVAudioEngine()
engine.attach(player)
engine.connect(player, to: engine.mainMixerNode, format: player.outputFormat(forBus: 0))
try! engine.start() // force-try as in the gist; real code should handle the error
player.play()       // nothing is audible until a buffer or file is scheduled on the player
tion-low / gist:2bf2aded8fccd50b7200f9de995318d7 (created July 13, 2019 13:37)
CGAffineTransform on Video
Portrait up
▿ CGAffineTransform
- a : 0.0
- b : 1.0
- c : -1.0
- d : 0.0
- tx : 1080.0
- ty : 0.0
Landscape left
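The dump above is the kind of preferredTransform an AVAssetTrack reports for portrait video. A small hypothetical helper (not part of the gist) that classifies a track's orientation from that transform:

import AVFoundation

// Hypothetical helper: map a video track's preferredTransform to an orientation label.
// (0, 1, -1, 0) matches the "Portrait up" dump above; the landscape labels follow the
// common iPhone-camera convention and can differ for other sources.
func orientation(of track: AVAssetTrack) -> String {
    let t = track.preferredTransform
    switch (t.a, t.b, t.c, t.d) {
    case (0, 1, -1, 0):   return "portrait up"
    case (0, -1, 1, 0):   return "portrait upside down"
    case (1, 0, 0, 1):    return "landscape right"
    case (-1, 0, 0, -1):  return "landscape left"
    default:              return "arbitrary transform"
    }
}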
tion-low / unity.modulemap (created July 11, 2019 06:25)
Unity.modulemap
framework module CustomFramework {
    umbrella header "CustomFramework.h"

    link framework "GameKit"
    link framework "iAd"
    link framework "CoreMotion"
    link framework "CoreGraphics"
    link framework "AVFoundation"
    link framework "MediaToolbox"
}

GCC_PREFIX_HEADER = Classes/Prefix.pch;
HEADER_SEARCH_PATHS = "$(SRCROOT)/Classes" "$(SRCROOT)/Classes/Native" "$(SRCROOT)/Libraries/libil2cpp/include";
LIBRARY_SEARCH_PATHS = "$(SRCROOT)/Libraries" "$(SRCROOT)/Libraries/libil2cpp/include" "$(SRCROOT)";
OTHER_LDFLAGS = -weak-lSystem -weak_framework CoreMotion -weak_framework GameKit -weak_framework iAd -framework CoreGraphics -framework AVFoundation -framework MediaToolbox -framework CoreVideo -framework CoreMedia -framework SystemConfiguration -framework CoreLocation -framework MediaPlayer -framework CFNetwork -framework AudioToolbox -framework OpenAL -framework OpenGLES -framework UIKit -framework Foundation -liconv.2 -liPhone-lib -lil2cpp;
ENABLE_BITCODE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MACH_O_TYPE = staticlib;
import UIKit
import AVFoundation

var tb = mach_timebase_info()   // initializer (creates an empty struct)
mach_timebase_info(&tb)         // this one is the function call that fills it in
let tsc = mach_absolute_time()

// Convert host ticks to seconds manually via the timebase ratio.
let t = Double(tsc) * Double(tb.numer) / Double(tb.denom) / 1_000_000_000.0
// The same conversion via AVAudioTime, then wrapped in a CMTime.
let seconds = AVAudioTime.seconds(forHostTime: tsc)
let cmtime = CMTime(seconds: seconds, preferredTimescale: 1_000_000_000)
// Wraps an AVAudioPCMBuffer in a CMSampleBuffer (the gist preview is truncated after this call).
func convertCMSampleBuffer(from pcmBuffer: AVAudioPCMBuffer) -> CMSampleBuffer? {
    let bufferList = pcmBuffer.audioBufferList
    let asbd = pcmBuffer.format.streamDescription
    var sampleBuffer: CMSampleBuffer!
    var formatDescription: CMFormatDescription!
    var status = CMAudioFormatDescriptionCreate(allocator: kCFAllocatorDefault,
                                                asbd: asbd,
                                                layoutSize: 0,
                                                layout: nil,
                                                magicCookieSize: 0,
                                                magicCookie: nil,
                                                extensions: nil,
                                                formatDescriptionOut: &formatDescription)
    // remainder cut off in the gist preview
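The preview stops mid-function. A minimal self-contained sketch of how the conversion can be finished, using CMSampleBufferCreate and CMSampleBufferSetDataBufferFromAudioBufferList (this is an assumed completion, not the gist's actual code; the function name and presentationTime parameter are illustrative):

import AVFoundation
import CoreMedia

// Assumed completion: wrap the PCM buffer's AudioBufferList in a CMSampleBuffer.
func makeSampleBuffer(from pcmBuffer: AVAudioPCMBuffer,
                      presentationTime: CMTime) -> CMSampleBuffer? {
    // 1. Describe the audio format.
    var formatDescription: CMAudioFormatDescription?
    guard CMAudioFormatDescriptionCreate(allocator: kCFAllocatorDefault,
                                         asbd: pcmBuffer.format.streamDescription,
                                         layoutSize: 0,
                                         layout: nil,
                                         magicCookieSize: 0,
                                         magicCookie: nil,
                                         extensions: nil,
                                         formatDescriptionOut: &formatDescription) == noErr,
          let format = formatDescription else { return nil }

    // 2. Create an empty sample buffer carrying the timing information.
    var timing = CMSampleTimingInfo(duration: CMTime(value: 1, timescale: CMTimeScale(pcmBuffer.format.sampleRate)),
                                    presentationTimeStamp: presentationTime,
                                    decodeTimeStamp: .invalid)
    var sampleBuffer: CMSampleBuffer?
    guard CMSampleBufferCreate(allocator: kCFAllocatorDefault,
                               dataBuffer: nil,
                               dataReady: false,
                               makeDataReadyCallback: nil,
                               refcon: nil,
                               formatDescription: format,
                               sampleCount: CMItemCount(pcmBuffer.frameLength),
                               sampleTimingEntryCount: 1,
                               sampleTimingArray: &timing,
                               sampleSizeEntryCount: 0,
                               sampleSizeArray: nil,
                               sampleBufferOut: &sampleBuffer) == noErr,
          let buffer = sampleBuffer else { return nil }

    // 3. Attach the PCM data to the sample buffer.
    guard CMSampleBufferSetDataBufferFromAudioBufferList(buffer,
                                                         blockBufferAllocator: kCFAllocatorDefault,
                                                         blockBufferMemoryAllocator: kCFAllocatorDefault,
                                                         flags: 0,
                                                         bufferList: pcmBuffer.audioBufferList) == noErr else { return nil }
    return buffer
}

CMSampleBufferSetDataBufferFromAudioBufferList creates and attaches the backing CMBlockBuffer itself, which avoids managing a block buffer manually.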