@joshbalfour
Last active April 2, 2019 04:40
react native streaming audio player
import React from 'react'
import PropTypes from 'prop-types'
import { NativeModules, NativeEventEmitter } from 'react-native'

const audio = NativeModules.RNStreamingAudioPlayer
const audioEvent = new NativeEventEmitter(NativeModules.RNStreamingAudioPlayer)
class AudioPlayer extends React.PureComponent {
  constructor(props) {
    super(props)
    this.state = {
      loaded: false,
      error: false,
    }
  }

  static propTypes = {
    volume: PropTypes.number,
    url: PropTypes.string,
    title: PropTypes.string,
    artist: PropTypes.string,
    artwork: PropTypes.string,
    skippedTo: PropTypes.number,
    isPlaying: PropTypes.bool,
    playerDurationChange: PropTypes.func,
    playerCurrentTimeChange: PropTypes.func,
  }

  newSound(props) {
    const track = {
      name: props.title,
      artist_name: props.artist,
      artwork: props.artwork,
    }
    audio.initPlayer(props.url, track)
  }
  handleLoaded(error) {
    if (error) {
      console.error('error loading music')
      console.error(error)
      this.setState({
        loaded: false,
        error: true,
      })
    } else {
      this.setState({
        loaded: true,
        error: false,
      })
      // getDuration on the native module is callback-based
      audio.getDuration(duration => this.props.playerDurationChange(duration))
    }
  }

  handlePlayComplete() {
    console.log('handlePlayComplete')
  }
  componentDidMount() {
  }

  componentWillUnmount() {
  }
  componentWillReceiveProps(props) {
    if (props.url !== this.props.url) {
      this.newSound(props)
    }
    if (props.isPlaying !== this.props.isPlaying) {
      if (props.isPlaying) {
        audio.play()
      } else {
        audio.pause()
      }
    }
    if (props.volume !== this.props.volume) {
      audio.setVolume(props.volume)
    }
    if (props.skippedTo !== this.props.skippedTo) {
      audio.seekToTime(props.skippedTo)
    }
  }
  render() {
    // nothing to render: this component only drives the native player
    return null
  }
}

export default AudioPlayer

Usage

Drag and drop the .h and .m files into your Xcode project, and make sure the Background Modes "Audio, AirPlay, and Picture in Picture" and "Background fetch" are enabled.

You should be good to go!
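
Note that AudioPlayer.js above accepts playerDurationChange and playerCurrentTimeChange callbacks but never actually invokes them (handleLoaded is never wired up). Below is a minimal sketch of one way to drive them from the JS side by polling the module's callback-based getCurrentTime and getDuration methods; the one-second interval and this particular lifecycle wiring are assumptions, not part of the gist:

// Hypothetical additions to the AudioPlayer class (sketch only)
componentDidMount() {
  // poll the native module once a second and forward the values
  // through the existing callback props
  this.poll = setInterval(() => {
    if (!this.props.isPlaying) return
    audio.getCurrentTime(currentTime => this.props.playerCurrentTimeChange(currentTime))
    audio.getDuration(duration => this.props.playerDurationChange(duration))
  }, 1000)
}

componentWillUnmount() {
  clearInterval(this.poll)
}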

// RNStreamingAudioPlayer.h
#import <React/RCTBridgeModule.h>
#import <React/RCTEventEmitter.h>
@import MediaPlayer;
@import AVFoundation;

@interface RNStreamingAudioPlayer : RCTEventEmitter <RCTBridgeModule>

@property (strong, nonatomic) AVPlayerItem *audioItem;
@property (strong, nonatomic) AVPlayer *audioPlayer;

@end
// RNStreamingAudioPlayer.m: React Native audio player logic (no UI)
#import "RNStreamingAudioPlayer.h"
#import <React/RCTBridge.h>
#import <React/RCTConvert.h>

@implementation RNStreamingAudioPlayer

@synthesize bridge = _bridge;

RCT_EXPORT_MODULE();
RCT_EXPORT_METHOD(initPlayer:(NSString *)url songInfo:(NSDictionary *)songInfo){
  if (!([url length] > 0)) return;

  NSString *name = [RCTConvert NSString:songInfo[@"name"]];
  NSString *artist_name = [RCTConvert NSString:songInfo[@"artist_name"]];
  [MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = @{
    MPMediaItemPropertyTitle : name,
    MPMediaItemPropertyArtist : artist_name,
    MPNowPlayingInfoPropertyPlaybackRate : @1.0f,
  };

  NSString *artwork = [RCTConvert NSString:songInfo[@"artwork"]];
  if (artwork != nil) {
    [self setNowPlayingArtwork:artwork];
  }

  NSURL *soundUrl = [[NSURL alloc] initWithString:url];
  self.audioItem = [AVPlayerItem playerItemWithURL:soundUrl];
  self.audioPlayer = [AVPlayer playerWithPlayerItem:self.audioItem];

  [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
  [[AVAudioSession sharedInstance] setActive:YES error:nil];

  dispatch_async(dispatch_get_main_queue(), ^{
    [[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
  });

  MPRemoteCommandCenter *commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
  commandCenter.previousTrackCommand.enabled = NO;
  [commandCenter.previousTrackCommand addTarget:self action:@selector(previousTapped:)];
  commandCenter.likeCommand.enabled = YES;
  commandCenter.likeCommand.active = YES;
  commandCenter.playCommand.enabled = YES;
  [commandCenter.playCommand addTarget:self action:@selector(playAudio)];
  commandCenter.pauseCommand.enabled = YES;
  [commandCenter.pauseCommand addTarget:self action:@selector(pauseAudio)];
  commandCenter.nextTrackCommand.enabled = NO;
  [commandCenter.nextTrackCommand addTarget:self action:@selector(nextTapped:)];

  [[NSNotificationCenter defaultCenter]
      addObserver:self selector:@selector(playerItemDidReachEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:self.audioItem];
  [[NSNotificationCenter defaultCenter]
      addObserver:self selector:@selector(playerItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:self.audioItem];
}
- (void)setNowPlayingArtwork:(NSString *)url
{
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
    UIImage *image = nil;
    // check whether an artwork path is present
    if (![url isEqual:@""]) {
      if ([url hasPrefix:@"http://"] || [url hasPrefix:@"https://"]) {
        // artwork is a remote URL, download it
        NSURL *imageURL = [NSURL URLWithString:url];
        NSData *imageData = [NSData dataWithContentsOfURL:imageURL];
        image = [UIImage imageWithData:imageData];
      } else {
        // artwork is local, so load it from the Documents directory
        NSString *basePath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
        NSString *fullPath = [NSString stringWithFormat:@"%@%@", basePath, url];
        BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:fullPath];
        if (fileExists) {
          image = [UIImage imageNamed:fullPath];
        }
      }
    }
    // if no image could be loaded, don't do anything
    if (image == nil) {
      return;
    }
    // check whether the image actually decoded
    CGImageRef cgref = [image CGImage];
    CIImage *cim = [image CIImage];
    if (cim != nil || cgref != NULL) {
      // hop back to the main queue to update nowPlayingInfo
      dispatch_async(dispatch_get_main_queue(), ^{
        MPNowPlayingInfoCenter *center = [MPNowPlayingInfoCenter defaultCenter];
        MPMediaItemArtwork *artwork = [[MPMediaItemArtwork alloc] initWithImage:image];
        NSMutableDictionary *mediaDict = (center.nowPlayingInfo != nil)
            ? [[NSMutableDictionary alloc] initWithDictionary:center.nowPlayingInfo]
            : [NSMutableDictionary dictionary];
        [mediaDict setValue:artwork forKey:MPMediaItemPropertyArtwork];
        center.nowPlayingInfo = mediaDict;
      });
    }
  });
}
- (void)playerItemDidReachEnd:(NSNotification *)notification{
  NSLog(@"playerItemDidReachEnd");
  [self.audioItem seekToTime:kCMTimeZero];
  [self sendEventWithName:@"AudioEnded" body:@{@"event": @"finished"}];
}

- (void)playerItemStalled:(NSNotification *)notification{
  // try to resume if playback stalled, e.g. while buffering
  [self.audioPlayer play];
}

- (void)playAudio{
  [self.audioPlayer play];
}

- (void)pauseAudio{
  [self.audioPlayer pause];
}

- (void)nextTapped:(MPRemoteCommandEvent *)event{
  [self sendEventWithName:@"goToNext" body:@{@"event": @"nextSong"}];
}

- (void)previousTapped:(MPRemoteCommandEvent *)event{
  [self sendEventWithName:@"goToPrevious" body:@{@"event": @"previousSong"}];
}
RCT_EXPORT_METHOD(getDuration:(RCTResponseSenderBlock)callback){
  // Crude, but spinning until the item is ready prevents returning NaN when
  // this is called from componentDidMount before the player has finished loading.
  while (self.audioItem.status != AVPlayerItemStatusReadyToPlay) {
  }
  float duration = CMTimeGetSeconds(self.audioItem.duration);
  callback(@[[[NSNumber alloc] initWithFloat:duration]]);
}

RCT_EXPORT_METHOD(getCurrentTime:(RCTResponseSenderBlock)callback){
  // same crude wait as getDuration, for the same reason
  while (self.audioItem.status != AVPlayerItemStatusReadyToPlay) {
  }
  float currentTime = CMTimeGetSeconds(self.audioItem.currentTime);
  callback(@[[[NSNumber alloc] initWithFloat:currentTime]]);
}
- (NSArray<NSString *> *)supportedEvents
{
  return @[@"AudioEnded", @"goToPrevious", @"goToNext"];
}

RCT_EXPORT_METHOD(play){
  [self.audioPlayer play];
}

RCT_EXPORT_METHOD(pause){
  [self.audioPlayer pause];
}

RCT_EXPORT_METHOD(setVolume:(nonnull NSNumber *)volumeValue){
  self.audioPlayer.volume = [volumeValue floatValue];
}

RCT_EXPORT_METHOD(seekToTime:(nonnull NSNumber *)toTime){
  [self.audioPlayer seekToTime:CMTimeMakeWithSeconds([toTime floatValue], NSEC_PER_SEC)];
}
@end
import React from 'react'
import AudioPlayer from './AudioPlayer'

const Example = () => (
  <AudioPlayer
    url=""
    title=""
    artist=""
    artwork=""
    date=""
    volume={8}
    skippedTo={0}
    isPlaying={true}
    playerCurrentTimeChange={newCurrentTime => {}}
    playerDurationChange={newDuration => {}}
  />
)

export default Example
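
The Objective-C module declares "AudioEnded", "goToNext", and "goToPrevious" in supportedEvents, and AudioPlayer.js creates an audioEvent NativeEventEmitter but never subscribes to it. A rough sketch of how a consumer might listen for those events; the empty handler bodies and where you register/remove the subscriptions are assumptions, not part of the gist:

import { NativeModules, NativeEventEmitter } from 'react-native'

const audioEvent = new NativeEventEmitter(NativeModules.RNStreamingAudioPlayer)

// register once (e.g. in componentDidMount) and keep the subscriptions around
const subscriptions = [
  // emitted from playerItemDidReachEnd: on the native side
  audioEvent.addListener('AudioEnded', () => {}),
  // emitted from the native nextTapped: remote-control handler
  audioEvent.addListener('goToNext', () => {}),
  // emitted from the native previousTapped: remote-control handler
  audioEvent.addListener('goToPrevious', () => {}),
]

// later (e.g. in componentWillUnmount):
// subscriptions.forEach(sub => sub.remove())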
@dengue8830

I'm getting

[AudioHAL_Client] AudioHardware.cpp:1210:AudioObjectRemovePropertyListener: AudioObjectRemovePropertyListener: no object with given ID 0

with this audio URL:

https://sample-videos.com/audio/mp3/crowd-cheering.mp3
