Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Set State fix and Manual Metadata Definition #95

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,15 @@ class Player extends Component {
);

ReactNativeAudioStreaming.getStatus((error, status) => {
console.log('Status', status);
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Log to remove

(error) ? console.log(error) : this.setState(status)
});
}

componentWillUnmount() {
this.subscription.remove();
}

_onPress() {
switch (this.state.status) {
case PLAYING:
Expand Down
3 changes: 3 additions & 0 deletions ios/ReactNativeAudioStreaming.h
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@
@property (nonatomic, readwrite) BOOL showNowPlayingInfo;
@property (nonatomic, readwrite) NSString *lastUrlString;
@property (nonatomic, retain) NSString *currentSong;
@property (nonatomic, retain) NSString *currentArtist;
@property (nonatomic, retain) NSString *currentArtwork;
@property (nonatomic, readwrite) BOOL fetchedAlbumArt;

- (void)play:(NSString *) streamUrl options:(NSDictionary *)options;
- (void)pause;
Expand Down
142 changes: 95 additions & 47 deletions ios/ReactNativeAudioStreaming.m
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,10 @@ - (ReactNativeAudioStreaming *)init
[self.audioPlayer setDelegate:self];
self.lastUrlString = @"";
[NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(tick:) userInfo:nil repeats:YES];
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Don't commit indentation changes plz


NSLog(@"AudioPlayer initialized");
}

return self;
}

Expand All @@ -40,12 +40,12 @@ -(void) tick:(NSTimer*)timer
if (!self.audioPlayer) {
return;
}

if (self.audioPlayer.currentlyPlayingQueueItemId != nil && self.audioPlayer.state == STKAudioPlayerStatePlaying) {
NSNumber *progress = [NSNumber numberWithFloat:self.audioPlayer.progress];
NSNumber *duration = [NSNumber numberWithFloat:self.audioPlayer.duration];
NSString *url = [NSString stringWithString:self.audioPlayer.currentlyPlayingQueueItemId];

[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent" body:@{
@"status": @"STREAMING",
@"progress": progress,
Expand Down Expand Up @@ -73,20 +73,32 @@ - (void)dealloc
}

[self activate];

if (self.audioPlayer.state == STKAudioPlayerStatePaused && [self.lastUrlString isEqualToString:streamUrl]) {
[self.audioPlayer resume];
} else {
[self.audioPlayer play:streamUrl];
}

self.fetchedAlbumArt = false;
self.lastUrlString = streamUrl;
self.showNowPlayingInfo = false;

if ([options objectForKey:@"showIniOSMediaCenter"]) {
if ([options objectForKey:@"metadata"]) {
NSDictionary *metadata = [options objectForKey:@"metadata"];
self.currentSong = [metadata objectForKey:@"title"];
self.currentArtist = [metadata objectForKey:@"artist"];
self.currentArtwork = [metadata objectForKey:@"artwork"];
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent" body:@{
@"status": @"METADATA_UPDATED",
@"key": @"StreamTitle",
@"value": self.currentSong
}];
}
self.showNowPlayingInfo = [[options objectForKey:@"showIniOSMediaCenter"] boolValue];
}

if (self.showNowPlayingInfo) {
//unregister any existing registrations
[self unregisterAudioInterruptionNotifications];
Expand All @@ -95,7 +107,7 @@ - (void)dealloc
[self registerAudioInterruptionNotifications];
[self registerRemoteControlEvents];
}

[self setNowPlayingInfo:true];
}

Expand All @@ -113,9 +125,9 @@ - (void)dealloc
if (!self.audioPlayer) {
return;
}

double newtime = self.audioPlayer.progress + seconds;

if (self.audioPlayer.duration < newtime) {
[self.audioPlayer stop];
[self setNowPlayingInfo:false];
Expand All @@ -129,9 +141,9 @@ - (void)dealloc
if (!self.audioPlayer) {
return;
}

double newtime = self.audioPlayer.progress - seconds;

if (newtime < 0) {
[self.audioPlayer seekToTime:0.0];
} else {
Expand Down Expand Up @@ -187,7 +199,7 @@ - (void)dealloc
} else if ([self.audioPlayer state] == STKAudioPlayerStateBuffering) {
status = @"BUFFERING";
}

callback(@[[NSNull null], @{@"status": status, @"progress": progress, @"duration": duration, @"url": self.lastUrlString}]);
}

Expand Down Expand Up @@ -215,7 +227,7 @@ - (void)audioPlayer:(STKAudioPlayer *)player unexpectedError:(STKAudioPlayerErro

- (void)audioPlayer:(STKAudioPlayer *)audioPlayer didReadStreamMetadata:(NSDictionary *)dictionary {
NSLog(@"AudioPlayer SONG NAME %@", dictionary[@"StreamTitle"]);

self.currentSong = dictionary[@"StreamTitle"] ? dictionary[@"StreamTitle"] : @"";
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent" body:@{
@"status": @"METADATA_UPDATED",
Expand All @@ -229,33 +241,33 @@ - (void)audioPlayer:(STKAudioPlayer *)player stateChanged:(STKAudioPlayerState)s
{
NSNumber *duration = [NSNumber numberWithFloat:self.audioPlayer.duration];
NSNumber *progress = [NSNumber numberWithFloat:self.audioPlayer.progress];

switch (state) {
case STKAudioPlayerStatePlaying:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"PLAYING", @"progress": progress, @"duration": duration, @"url": self.lastUrlString}];
break;

case STKAudioPlayerStatePaused:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"PAUSED", @"progress": progress, @"duration": duration, @"url": self.lastUrlString}];
break;

case STKAudioPlayerStateStopped:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"STOPPED", @"progress": progress, @"duration": duration, @"url": self.lastUrlString}];
break;

case STKAudioPlayerStateBuffering:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"BUFFERING"}];
break;

case STKAudioPlayerStateError:
[self.bridge.eventDispatcher sendDeviceEventWithName:@"AudioBridgeEvent"
body:@{@"status": @"ERROR"}];
break;

default:
break;
}
Expand All @@ -267,10 +279,10 @@ - (void)audioPlayer:(STKAudioPlayer *)player stateChanged:(STKAudioPlayerState)s
- (void)activate
{
NSError *categoryError = nil;

[[AVAudioSession sharedInstance] setActive:YES error:&categoryError];
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:&categoryError];

if (categoryError) {
NSLog(@"Error setting category! %@", [categoryError description]);
}
Expand All @@ -279,9 +291,9 @@ - (void)activate
- (void)deactivate
{
NSError *categoryError = nil;

[[AVAudioSession sharedInstance] setActive:NO error:&categoryError];

if (categoryError) {
NSLog(@"Error setting category! %@", [categoryError description]);
}
Expand All @@ -294,7 +306,7 @@ - (void)setSharedAudioSessionCategory

[[AVAudioSession sharedInstance] setActive:NO error:&categoryError];
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAmbient error:&categoryError];

if (categoryError) {
NSLog(@"Error setting category! %@", [categoryError description]);
}
Expand Down Expand Up @@ -328,24 +340,24 @@ - (void)onAudioInterruption:(NSNotification *)notification
{
// Get the user info dictionary
NSDictionary *interruptionDict = notification.userInfo;

// Get the AVAudioSessionInterruptionTypeKey enum from the dictionary
NSInteger interuptionType = [[interruptionDict valueForKey:AVAudioSessionInterruptionTypeKey] integerValue];

// Decide what to do based on interruption type
switch (interuptionType)
{
case AVAudioSessionInterruptionTypeBegan:
NSLog(@"Audio Session Interruption case started.");
[self.audioPlayer pause];
break;

case AVAudioSessionInterruptionTypeEnded:
NSLog(@"Audio Session Interruption case ended.");
self.isPlayingWithOthers = [[AVAudioSession sharedInstance] isOtherAudioPlaying];
(self.isPlayingWithOthers) ? [self.audioPlayer stop] : [self.audioPlayer resume];
break;

default:
NSLog(@"Audio Session Interruption Notification case default.");
break;
Expand All @@ -354,41 +366,41 @@ - (void)onAudioInterruption:(NSNotification *)notification

- (void)onRouteChangeInterruption:(NSNotification *)notification
{

NSDictionary *interruptionDict = notification.userInfo;
NSInteger routeChangeReason = [[interruptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue];

switch (routeChangeReason)
{
case AVAudioSessionRouteChangeReasonUnknown:
NSLog(@"routeChangeReason : AVAudioSessionRouteChangeReasonUnknown");
break;

case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
// A user action (such as plugging in a headset) has made a preferred audio route available.
NSLog(@"routeChangeReason : AVAudioSessionRouteChangeReasonNewDeviceAvailable");
break;

case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
// The previous audio output path is no longer available.
[self.audioPlayer stop];
break;

case AVAudioSessionRouteChangeReasonCategoryChange:
// The category of the session object changed. Also used when the session is first activated.
NSLog(@"routeChangeReason : AVAudioSessionRouteChangeReasonCategoryChange"); //AVAudioSessionRouteChangeReasonCategoryChange
break;

case AVAudioSessionRouteChangeReasonOverride:
// The output route was overridden by the app.
NSLog(@"routeChangeReason : AVAudioSessionRouteChangeReasonOverride");
break;

case AVAudioSessionRouteChangeReasonWakeFromSleep:
// The route changed when the device woke up from sleep.
NSLog(@"routeChangeReason : AVAudioSessionRouteChangeReasonWakeFromSleep");
break;

case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
// The route changed because no suitable route is now available for the specified category.
NSLog(@"routeChangeReason : AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory");
Expand Down Expand Up @@ -431,19 +443,55 @@ - (void)unregisterRemoteControlEvents
[commandCenter.pauseCommand removeTarget:self];
}

/// Asynchronously downloads and decodes an image from the given URL.
/// The network fetch AND the decode both run on a background queue;
/// only the callback invocation is dispatched back to the main queue.
/// @param url      The remote image location.
/// @param callback Invoked on the main queue with the decoded image,
///                 or nil if the download failed or the data was not
///                 a valid image. May be nil (then nothing is called).
+ (void)loadFromURL:(NSURL *)url callback:(void (^)(UIImage *image))callback {
    dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0ul);
    dispatch_async(queue, ^{
        NSData *imageData = [NSData dataWithContentsOfURL:url];
        // Decode off the main thread: creating a UIImage from data can be
        // costly, and the original did it inside the main-queue dispatch.
        UIImage *image = imageData ? [UIImage imageWithData:imageData] : nil;
        dispatch_async(dispatch_get_main_queue(), ^{
            // Guard the block: calling a nil block crashes.
            if (callback) {
                callback(image);
            }
        });
    });
}

- (void)setNowPlayingInfo:(bool)isPlaying
{
if (self.showNowPlayingInfo) {
// TODO Get artwork from stream
// MPMediaItemArtwork *artwork = [[MPMediaItemArtwork alloc]initWithImage:[UIImage imageNamed:@"webradio1"]];

NSString* appName = [[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleName"];
NSDictionary *nowPlayingInfo = [NSDictionary dictionaryWithObjectsAndKeys:
self.currentSong ? self.currentSong : @"", MPMediaItemPropertyAlbumTitle,
@"", MPMediaItemPropertyAlbumArtist,
appName ? appName : @"AppName", MPMediaItemPropertyTitle,
[NSNumber numberWithFloat:isPlaying ? 1.0f : 0.0], MPNowPlayingInfoPropertyPlaybackRate, nil];
[MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nowPlayingInfo;

NSString* bundleDisplayName =[[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleDisplayName"];
NSString* appName = bundleDisplayName? bundleDisplayName :
[[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleName"];

NSString* title = self.currentSong ? self.currentSong : @"";
NSString* artist = self.currentArtist ? self.currentArtist : @"";
NSString* applicationName = appName ? appName : @"AppName";

if (!self.fetchedAlbumArt && self.currentArtwork != nil) {
NSString* imageUrl = self.currentArtwork;
dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
dispatch_async(queue, ^{
MPNowPlayingInfoCenter *infoCenter = [MPNowPlayingInfoCenter defaultCenter];
UIImage *artworkImage = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:imageUrl]]];
if(artworkImage) {
MPMediaItemArtwork *albumArt = [[MPMediaItemArtwork alloc] initWithImage: artworkImage];
NSMutableDictionary *songInfo =
infoCenter.nowPlayingInfo ? [infoCenter.nowPlayingInfo mutableCopy] : [NSMutableDictionary dictionary];
[songInfo setValue:albumArt forKey:MPMediaItemPropertyArtwork];
infoCenter.nowPlayingInfo = songInfo;
}
self.fetchedAlbumArt = true;
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It seems that with your implementation you can only fetch the first song's artwork — what happens if a new song from another album starts playing?

});
}

MPNowPlayingInfoCenter *infoCenter = [MPNowPlayingInfoCenter defaultCenter];
NSDictionary *nowPlayingInfo = infoCenter.nowPlayingInfo ?
[NSMutableDictionary dictionaryWithDictionary:infoCenter.nowPlayingInfo ] : [NSMutableDictionary dictionary];

[nowPlayingInfo setValue:title forKey:MPMediaItemPropertyAlbumTitle];
[nowPlayingInfo setValue:artist forKey:MPMediaItemPropertyArtist];
[nowPlayingInfo setValue:applicationName forKey:MPMediaItemPropertyTitle];
[nowPlayingInfo setValue:[NSNumber numberWithFloat:isPlaying ? 1.0f : 0.0] forKey:MPNowPlayingInfoPropertyPlaybackRate];
infoCenter.nowPlayingInfo = nowPlayingInfo;
} else {
[MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nil;
}
Expand Down