@Martini024
Last active March 21, 2024 20:47
SwiftUI: Rewrite iOS Photos Video Scrubber
import Foundation
import AVKit

class VideoHelper {
    /// Grabs a single thumbnail from the player's current item at the given time.
    static func getThumbnail(from player: AVPlayer, at time: CMTime) -> CGImage? {
        guard let currentItem = player.currentItem else { return nil }
        return getThumbnail(from: currentItem.asset, at: time)
    }

    /// Grabs a single thumbnail from an asset at the given time.
    static func getThumbnail(from asset: AVAsset?, at time: CMTime) -> CGImage? {
        guard let asset = asset else { return nil }
        let imgGenerator = AVAssetImageGenerator(asset: asset)
        imgGenerator.appliesPreferredTrackTransform = true
        return try? imgGenerator.copyCGImage(at: time, actualTime: nil)
    }

    /// Generates evenly spaced thumbnails that together fill `containerSize`.
    static func generateThumbnailImages(_ player: AVPlayer, _ containerSize: CGSize) -> [UIImage] {
        var images: [UIImage] = []
        guard let currentItem = player.currentItem else { return images }
        guard let track = currentItem.asset.tracks(withMediaType: .video).first else { return images }

        // Thumbnail width that preserves the video's aspect ratio at the container height.
        let assetSize = track.naturalSize.applying(track.preferredTransform)
        let height = containerSize.height
        let ratio = assetSize.width / assetSize.height
        let width = height * ratio

        // Enough thumbnails to cover the container width, sampled at equal intervals.
        let thumbnailCount = Int(ceil(containerSize.width / abs(width)))
        let interval = currentItem.asset.duration.seconds / Double(thumbnailCount)
        for i in 0..<thumbnailCount {
            guard let thumbnail = VideoHelper.getThumbnail(from: currentItem.asset, at: CMTime(seconds: Double(i) * interval, preferredTimescale: 1000)) else { return images }
            images.append(UIImage(cgImage: thumbnail))
        }
        return images
    }

    /// Width / height of the video track, with the preferred transform applied.
    static func getVideoAspectRatio(_ player: AVPlayer) -> CGFloat? {
        guard let track = player.currentItem?.asset.tracks(withMediaType: .video).first else { return nil }
        let assetSize = track.naturalSize.applying(track.preferredTransform)
        return assetSize.width / assetSize.height
    }

    static func getCurrentTime(_ player: AVPlayer) -> CMTime? {
        guard let currentItem = player.currentItem else { return nil }
        return currentItem.currentTime()
    }

    static func getDuration(_ player: AVPlayer) -> CMTime? {
        guard let currentItem = player.currentItem else { return nil }
        return currentItem.asset.duration
    }
}
import SwiftUI
import AVKit

struct VideoPlayerControls: View {
    let player: AVPlayer
    @Binding var currentTime: CGFloat
    var height: CGFloat = 50
    var actionImage: String = "plus"
    @State private var isPlaying: Bool = false
    @State private var isTracking: Bool = false
    @State private var timeObserver: Any?
    var action: (() -> Void)?

    var body: some View {
        HStack(spacing: 0) {
            // Play / pause toggle.
            Button {
                isPlaying ? player.pause() : player.play()
                isPlaying.toggle()
            } label: {
                Image(systemName: isPlaying ? "pause.fill" : "play.fill")
                    .resizable()
                    .padding()
                    .frame(width: height, height: height, alignment: .center)
            }
            .foregroundColor(.white)
            .overlay(Rectangle().frame(width: 1, height: nil).foregroundColor(Color.black), alignment: .trailing)

            // Thumbnail strip with the draggable scrubber.
            VideoScrollPreview(player: player, isPlaying: $isPlaying, currentTime: $currentTime, isTracking: $isTracking)
                .padding(4)
                .frame(width: nil, height: height)

            // Optional trailing action button.
            if let action = action {
                Button {
                    action()
                } label: {
                    Image(systemName: actionImage)
                        .resizable()
                        .padding()
                        .frame(width: height, height: height, alignment: .center)
                }
                .foregroundColor(.white)
                .overlay(Rectangle().frame(width: 1, height: nil).foregroundColor(Color.black), alignment: .leading)
            }
        }
        .background(Color(UIColor.darkGray)) // bridge UIColor.darkGray; SwiftUI has no built-in Color.darkGray
        .cornerRadius(5)
        .onAppear {
            startPeriodicTimeObserver()
        }
        .onDisappear {
            stopPeriodicTimeObserver()
        }
    }

    /// Keeps `currentTime` (0...1) in sync with playback while the user is not scrubbing.
    func startPeriodicTimeObserver() {
        timeObserver = player.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.1, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), queue: nil) { time in
            guard isTracking == false else { return }
            guard let duration = VideoHelper.getDuration(player) else { return }
            self.currentTime = CGFloat(CMTimeGetSeconds(time) / CMTimeGetSeconds(duration))
            if self.currentTime == 1.0 {
                self.isPlaying = false
            }
        }
    }

    func stopPeriodicTimeObserver() {
        guard let observer = timeObserver else { return }
        player.removeTimeObserver(observer)
    }
}
import SwiftUI
import AVKit

struct VideoScrollPreview: View {
    let player: AVPlayer
    @Binding var isPlaying: Bool
    @Binding var currentTime: CGFloat
    @Binding var isTracking: Bool
    @State private var images: [UIImage] = []

    var body: some View {
        GeometryReader { geometry in
            ZStack {
                // Thumbnail strip generated from the video.
                HStack(spacing: 0) {
                    ForEach(images, id: \.self) { image in
                        Image(uiImage: image)
                            .resizable()
                            .scaledToFit()
                    }
                }
                // Playhead indicator, positioned by the normalized currentTime (0...1).
                RoundedRectangle(cornerRadius: 10, style: .continuous)
                    .frame(width: 4, height: geometry.size.height + 4)
                    .position(x: currentTime * geometry.size.width, y: geometry.size.height / 2)
                    .foregroundColor(.white)
                    .shadow(radius: 10)
            }
            .gesture(
                DragGesture(minimumDistance: 0)
                    .onChanged {
                        isTracking = true
                        if isPlaying {
                            player.pause()
                        }
                        // Clamp the drag location to the strip and seek proportionally.
                        currentTime = min(geometry.size.width, max(0, $0.location.x)) / geometry.size.width
                        guard let duration = VideoHelper.getDuration(player) else { return }
                        let targetTime = CMTimeMultiplyByFloat64(duration, multiplier: Float64(currentTime))
                        player.seek(to: targetTime)
                    }
                    .onEnded { _ in
                        isTracking = false
                        if isPlaying {
                            player.play()
                        }
                    }
            )
            .position(x: geometry.frame(in: .local).midX, y: geometry.frame(in: .local).midY)
            .onAppear {
                images = VideoHelper.generateThumbnailImages(player, geometry.size)
            }
        }
    }
}
@arshiacont

Thanks for sharing! Any chance you could point to an implementation of VideoHelper.generateThumbnailImages(:)?

@inder663

Thank you for this important code, but VideoHelper is missing.

@Martini024
Author

Martini024 commented Apr 22, 2022

Oh ya, totally forgot about it, just updated, please check. @arshiacont @inder663

@d0nq123

d0nq123 commented Jan 15, 2023

@Martini024 Thanks for this. A quick question: let’s assume that I have a video called video.mov. How would you use this in ContentView?

@Martini024
Author

You should probably have another higher-order component that accepts video.mov's URL as a param, creates an AVPlayer instance with player = AVPlayer(url: videoUrl), and then passes player to VideoPlayerControls. This is generally how I use the component, and hopefully it helps you a bit.

The only thing I'm not sure about is whether AVPlayer supports loading .mov or not; currently I have only tested .mp4. That is another research topic, so you'd better take some time to validate it.
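
For reference, a minimal sketch of such a wrapper, assuming a video bundled with the app; the view name, the resource name, and the @State property are illustrative, not part of the gist:

import SwiftUI
import AVKit

struct VideoScrubberDemo: View {
    // Hypothetical bundled resource; replace with your own video URL.
    private let player = AVPlayer(url: Bundle.main.url(forResource: "video", withExtension: "mp4")!)
    // Normalized playback position (0...1) driven by VideoPlayerControls.
    @State private var currentTime: CGFloat = 0

    var body: some View {
        VideoPlayerControls(player: player, currentTime: $currentTime)
            .frame(height: 50)
            .padding()
    }
}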

@d0nq123

d0nq123 commented Jan 16, 2023

@Martini024 Thanks for the prompt reply. It looks like AVPlayer does support .mov when using VideoPlayer, but I have an issue passing currentTime to VideoPlayerControls.

VideoPlayerControls(player:player, currentTime:CGFloat(0.0)) throws an error in

struct ContentView: View {
    @State var player = AVPlayer(url: Bundle.main.url(forResource: "Video", withExtension: "mov")!)
    var body: some View {
        VStack{
            VideoPlayerControls(player:player, currentTime:CGFloat(0.0))
                .frame(width: .infinity, height: .infinity, alignment: .center)
            
        }
    }
}

@Martini024
Author

Okay, I think you need some prior knowledge of SwiftUI. Check this on how to initialize a @Binding: https://stackoverflow.com/questions/56685964/swiftui-binding-initialize. In short, as a quick solution you should use VideoPlayerControls(player: player, currentTime: .constant(CGFloat(0.0))).
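
Applied to the ContentView posted above, the quick fix looks roughly like this (a sketch: the .frame values are simplified, and an @State-backed binding is preferable if you actually need to read the progress):

import SwiftUI
import AVKit

struct ContentView: View {
    @State var player = AVPlayer(url: Bundle.main.url(forResource: "Video", withExtension: "mov")!)
    var body: some View {
        VStack {
            // .constant gives a read-only binding, which is enough to satisfy the initializer.
            VideoPlayerControls(player: player, currentTime: .constant(CGFloat(0.0)))
                .frame(height: 50)
        }
    }
}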

@d0nq123

d0nq123 commented Jan 17, 2023

Thanks!

@donrodriguez

donrodriguez commented Mar 21, 2024

THANK YOU SO MUCH!!! You freaking genius
