I hope this issue goes to the correct stream.
I am playing HLS, i.e. I use AVPlayer to play mixed content (audio and pure video) from a server.
I keep the audio playing after moving the app into the background: I have enabled that capability in the plist, and in addition I store the previous player, disable the visual tracks, and detach the player from the AVPlayerLayer (set it to nil).
Here are the methods:
- (void)changePlayerState:(BOOL)restored
{
    if (restored && !self.storedPlayer)
        return;

    if (!restored) {
        // Going to background: keep the player alive but detach it from the layer.
        self.storedPlayer = self.playerView.player;
        [self.playerView setPlayer:nil];
    } else if (self.storedPlayer) {
        // Returning to foreground: reattach the stored player.
        [self.playerView setPlayer:self.storedPlayer];
        self.storedPlayer = nil;
    }

    AVPlayerItem *playerItem = self.playerView.playerItem;
    NSArray *tracks = [playerItem tracks];
    for (AVPlayerItemTrack *playerItemTrack in tracks)
    {
        // Disable video tracks in the background, re-enable them when restored.
        if ([playerItemTrack.assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual])
            playerItemTrack.enabled = restored;
    }
}
And the following one:
- (void)setPlayer:(AVPlayer *)player
{
    if (_player != player) {
        [self removePlayerObservers];
        _player = player;
        [self configurePlayerLayer];
        if (_player)
            [self addPlayerObservers];
    }
}

- (void)removePlayerObservers
{
    if ([_player observationInfo] != nil) {
        [_player removeObserver:self
                     forKeyPath:@"rate"
                        context:NPUIPlayerViewPlayerRateObservationContext];
        [_player removeObserver:self
                     forKeyPath:@"externalPlaybackActive"
                        context:NPUIPlayerViewAirPlayVideoActiveObservationContext];
    }
    [self removePeriodicTimeObserver];
}

- (void)addPlayerObservers
{
    [_player addObserver:self
              forKeyPath:@"rate"
                 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                 context:NPUIPlayerViewPlayerRateObservationContext];
    // Note: the KVO key path is the property name "externalPlaybackActive",
    // not the getter "isExternalPlaybackActive".
    [_player addObserver:self
              forKeyPath:@"externalPlaybackActive"
                 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                 context:NPUIPlayerViewAirPlayVideoActiveObservationContext];
    [self addPeriodicTimeObserver];
}
- (void)removePeriodicTimeObserver
{
    if (_registeredTimeObserver) {
        [_player removeTimeObserver:_registeredTimeObserver];
        _registeredTimeObserver = nil;
    }
}

- (void)addPeriodicTimeObserver
{
    __weak NPUIPlayerView *weakSelf = self;
    _registeredTimeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(0.5f, 10)
                                                                    queue:dispatch_get_main_queue()
                                                               usingBlock:^(CMTime time) {
        // (The block body was cut off in the original post; it updates the UI via weakSelf.)
    }];
}
So after moving the app to the background, pressing the Home button, and opening Control Center (swiping up from the bottom, where the calculator, camera, and flashlight buttons are), I can set the title of the item being played, but Apple's player controls don't work: pressing Pause or Play does nothing, and the slider doesn't work either.
I do receive the events when I press the buttons, but even though I call [player pause] or change the rate, the button state doesn't change. However, when there is a problem accessing the audio over the network, or the sound buffer runs empty, the button does change and shows the paused UI.
Below is the code that sets up the Now Playing info center and adds the remote commands:
- (void)adjustNowPlayingScreen
{
    MPNowPlayingInfoCenter *infoCenter = [MPNowPlayingInfoCenter defaultCenter];
    NSMutableDictionary *mDic = [NSMutableDictionary dictionary];

    NSString *newTitle = self.playerTitle.text ?: self.itemToPlay.shortName ?: self.itemToPlay.name ?: @"";
    if (newTitle.length)
        mDic[MPMediaItemPropertyTitle] = newTitle;

    AVPlayerItem *item = [self.playerView.player currentItem];
    CMTime itemDuration = [self.playerView.player.currentItem duration];
    if (CMTIME_IS_VALID(itemDuration)) {
        NSTimeInterval duration = CMTimeGetSeconds(itemDuration);
        if (duration)
            mDic[MPMediaItemPropertyPlaybackDuration] = @(duration);
    }
    else {
        NSTimeInterval duration = CMTimeGetSeconds([item.asset duration]);
        if (!isnan(duration) && duration > 0)
            mDic[MPMediaItemPropertyPlaybackDuration] = @(duration);
        else {
            duration = CMTimeGetSeconds([[[[self playerView] playerItem] asset] duration]);
            if (!isnan(duration) && duration > 0)
                mDic[MPMediaItemPropertyPlaybackDuration] = @(duration);
        }
    }

    NSString *urlStr = self.itemToPlay.autoQualityURL ?: self.itemToPlay.lowAutoQualityURL;
    if (urlStr.length)
        mDic[MPMediaItemPropertyAssetURL] = urlStr;

    CMTime curTime = self.playerView.playerItem.currentTime;
    if (CMTIME_IS_VALID(curTime)) {
        NSTimeInterval elapsed = CMTimeGetSeconds(curTime);
        mDic[MPNowPlayingInfoPropertyElapsedPlaybackTime] = @(elapsed);
    }

    if (mDic.count) {
#warning HACK: Necessary to change number of played items & index
        mDic[MPMediaItemPropertyAlbumTrackNumber] = @(1);
        mDic[MPMediaItemPropertyAlbumTrackCount] = @(1);
        mDic[MPNowPlayingInfoPropertyPlaybackRate] = @(self.playerView.isPlaying ? 1.0 : 0.0);
        UIImage *img = [UIImage imageNamed:@"team_4f6ae0b99d4b856118000124"];
        mDic[MPMediaItemPropertyArtwork] = [[MPMediaItemArtwork alloc] initWithImage:img];
        infoCenter.nowPlayingInfo = mDic;
    }
}
- (void)addRemoteCommands
{
    MPRemoteCommandCenter *commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
    MPRemoteCommand *command = [commandCenter pauseCommand];
    command.enabled = YES;
    [command addTarget:self action:@selector(pauseCommand:)];
    command = [commandCenter playCommand];
    command.enabled = YES;
    [command addTarget:self action:@selector(playCommand:)];
    command = [commandCenter togglePlayPauseCommand];
    command.enabled = YES;
    [command addTarget:self action:@selector(toggleCommand:)];
    command = [commandCenter seekForwardCommand];
    command.enabled = YES;
    [command addTarget:self action:@selector(seekForwardCommand:)];
    command = [commandCenter seekBackwardCommand];
    command.enabled = YES;
    [command addTarget:self action:@selector(seekBackwardCommand:)];
}
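For reference, a target added with addTarget:action: can also return an MPRemoteCommandHandlerStatus from its action, so the system knows whether the command was handled; a hypothetical handler (the play helper here is assumed, it is not in the code above) might look like:
- (MPRemoteCommandHandlerStatus)handlePlayCommand:(MPRemoteCommandEvent *)event
{
    [self play]; // assumed helper, not part of the code above
    return MPRemoteCommandHandlerStatusSuccess;
}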
- (void)updateElapsedTime
{
    MPRemoteCommandCenter *commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
    MPRemoteCommand *command = [commandCenter togglePlayPauseCommand];
    if (!command.enabled)
        return;

    [self adjustNowPlayingScreen];

    // dispatch_time takes nanoseconds; a bare 2 would mean 2 ns, not 2 s.
    dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2 * NSEC_PER_SEC));
    dispatch_after(popTime, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^(void){
        [self updateElapsedTime];
    });
}
- (void)removeRemoteCommands
{
    MPRemoteCommandCenter *commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
    MPRemoteCommand *command = [commandCenter pauseCommand];
    command.enabled = NO;
    [command removeTarget:self action:@selector(pauseCommand:)];
    command = [commandCenter playCommand];
    command.enabled = NO;
    [command removeTarget:self action:@selector(playCommand:)];
    command = [commandCenter togglePlayPauseCommand];
    command.enabled = NO;
    [command removeTarget:self action:@selector(toggleCommand:)];
    command = [commandCenter seekForwardCommand];
    command.enabled = NO;
    [command removeTarget:self action:@selector(seekForwardCommand:)];
    command = [commandCenter seekBackwardCommand];
    command.enabled = NO;
    [command removeTarget:self action:@selector(seekBackwardCommand:)];
}
- (void)seekBackwardCommand:(MPRemoteCommandEvent *)event
{
    NSLog(@"%@", NSStringFromClass([event class]));
}

- (void)seekForwardCommand:(MPRemoteCommandEvent *)event
{
    NSLog(@"%@", NSStringFromClass([event class]));
}
- (void)pauseCommand:(MPRemoteCommandEvent *)event
{
    [self pause]; // [_player pause];
    AVPlayerItem *playerItem = self.playerView.playerItem;
    NSArray *tracks = [playerItem tracks];
    for (AVPlayerItemTrack *playerItemTrack in tracks)
    {
        /*
        if ([playerItemTrack.assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible])
            playerItemTrack.enabled = false; */
    }
    MPNowPlayingInfoCenter *infoCenter = [MPNowPlayingInfoCenter defaultCenter];
    NSMutableDictionary *mDic = [NSMutableDictionary dictionaryWithDictionary:infoCenter.nowPlayingInfo];
    mDic[MPNowPlayingInfoPropertyPlaybackRate] = @(0.0);
    infoCenter.nowPlayingInfo = mDic;
}
So my pause command is called and I call AVPlayer's pause method, but it doesn't stop the audio stream.
The slider doesn't work either, so my seekBackwardCommand/seekForwardCommand handlers are never called.
But, as I said, when the stream ends (audio included), the Control Center player switches its button from playing to paused, i.e. it somehow listens to audio session changes.
I configure the sound session with the AVAudioSessionCategoryPlayback category and the AVAudioSessionModeMoviePlayback mode.
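For reference, a minimal sketch of the session setup just described:
// Audio session configuration as described above (sketch).
NSError *sessionError = nil;
AVAudioSession *session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayback error:&sessionError];
[session setMode:AVAudioSessionModeMoviePlayback error:&sessionError];
[session setActive:YES error:&sessionError];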
Please help: how do I properly handle the Play/Pause buttons on the Control Center screen, and how do I enable the slider?
I am running my code on iOS 9, on an iPhone 6.
Related
I am working on a camera recording app. I want to record video using both the front and the back camera. With the back camera the video works fine, but with the front camera the final video is mute (no audio).
CODE:
- (id)initWithPreviewView:(UIView *)previewView {
    self = [super init];
    if (self) {
        NSError *error;

        self.captureSession = [[AVCaptureSession alloc] init];
        self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
        // Alternatives tried: AVCaptureSessionPresetHigh, AVCaptureSessionPresetPhoto

        AVCaptureDevice *videoDevice;
        // if (isNeededToSave) {
        //     videoDevice = [self frontCamera];   // front camera
        // } else {
        videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; // back camera
        // }

        AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (error) {
            NSLog(@"Video input creation failed");
            return nil;
        }
        if (![self.captureSession canAddInput:videoIn]) {
            NSLog(@"Video input add-to-session failed");
            return nil;
        }
        [self.captureSession addInput:videoIn];

        /* Take photo */
        self.isUsingFrontFacingCamera = 0;

        // Make a still image output
        stillImageOutput = [AVCaptureStillImageOutput new];
        [stillImageOutput addObserver:self
                           forKeyPath:@"capturingStillImage"
                              options:NSKeyValueObservingOptionNew
                              context:(__bridge void *)(AVCaptureStillImageIsCapturingStillImageContext)];
        if ([self.captureSession canAddOutput:stillImageOutput])
            [self.captureSession addOutput:stillImageOutput];

        // Make a video data output
        videoDataOutput = [AVCaptureVideoDataOutput new];
        // We want BGRA; both CoreGraphics and OpenGL work well with 'BGRA'.
        NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:
            [NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        [videoDataOutput setVideoSettings:rgbOutputSettings];
        [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; // discard if the data output queue is blocked (as we process the still image)

        // Create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured;
        // a serial queue guarantees that video frames are delivered in order.
        // See the header doc for setSampleBufferDelegate:queue: for more information.
        videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
        [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
        if ([self.captureSession canAddOutput:videoDataOutput])
            [self.captureSession addOutput:videoDataOutput];
        [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:NO];
        /* Take photo */

        // Save the default format
        self.defaultFormat = videoDevice.activeFormat;
        defaultVideoMaxFrameDuration = videoDevice.activeVideoMaxFrameDuration;

        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *audioIn = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
        [self.captureSession addInput:audioIn];

        self.fileOutput = [[AVCaptureMovieFileOutput alloc] init];
        [self.captureSession addOutput:self.fileOutput];

        self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
        self.previewLayer.frame = previewView.bounds;
        self.previewLayer.contentsGravity = kCAGravityResizeAspectFill;
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [previewView.layer insertSublayer:self.previewLayer atIndex:0];

        [self.captureSession startRunning];
    }
    return self;
}
- (void)switchCameras
{
    AVCaptureDevicePosition desiredPosition;
    NSInteger isFront = [[NSUserDefaults standardUserDefaults] integerForKey:@"isUsingFrontFacingCamera"];
    if (isFront)
        desiredPosition = AVCaptureDevicePositionBack;
    else
        desiredPosition = AVCaptureDevicePositionFront;

    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            [[self.previewLayer session] beginConfiguration];
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
            // NOTE: this removes *every* input, including the audio input,
            // and only a video input is added back below.
            for (AVCaptureInput *oldInput in [[self.previewLayer session] inputs]) {
                [[self.previewLayer session] removeInput:oldInput];
            }
            [[self.previewLayer session] addInput:input];
            [[self.previewLayer session] commitConfiguration];
            break;
        }
    }

    [[NSUserDefaults standardUserDefaults] setInteger:(isFront == 0 ? 1 : 0) forKey:@"isUsingFrontFacingCamera"];
    [[NSUserDefaults standardUserDefaults] synchronize];
}
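Note: the input-removal loop above removes every input, including the audio input, and only a video input is added back, which would explain the mute front-camera recordings. A minimal sketch of a switch that keeps the audio input attached (names as in the code above; d is the desired camera device):
AVCaptureSession *session = [self.previewLayer session];
[session beginConfiguration];
for (AVCaptureInput *oldInput in [session inputs]) {
    // Remove only the current video input; leave the audio input in place.
    if ([oldInput isKindOfClass:[AVCaptureDeviceInput class]] &&
        [((AVCaptureDeviceInput *)oldInput).device hasMediaType:AVMediaTypeVideo]) {
        [session removeInput:oldInput];
    }
}
NSError *switchError = nil;
AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:d error:&switchError];
if (newInput && [session canAddInput:newInput]) {
    [session addInput:newInput];
}
[session commitConfiguration];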
I created a class that generates a HUD item; the HUD item can animate its texture, which is a gradient created using CIColor, rendered into a UIImage, and then used for an SKTexture. I've now noticed a lot of memory growth in my app, and running it through Instruments has shown me where, but I can't for the life of me figure out what's going on.
Here's the error I get (the Instruments screenshot isn't reproduced here): it attributes 91.4% of the growth to this line of code:
animatedGraphic = [[SKSpriteNode alloc] initWithTexture:[[TextureList sharedManager] returnGradientofSize:[[TextureList sharedManager] returnTextureSize:kGMHUDFlowerTarget]
                                                                                                 topColor:[CIColor colorWithRed:255.0/255.0 green:171.0/255.0 blue:121.0/255.0]
                                                                                              bottomColor:[CIColor colorWithRed:225.0/255.0 green:57.0/255.0 blue:86.0/255.0]]
                                                  color:[UIColor orangeColor]
                                                   size:CGSizeMake(0, self.frame.size.height)];
animatedGraphic.anchorPoint = CGPointMake(0, 0.5);
animatedGraphic.zPosition = self.zPosition + 1;
[self addChild:animatedGraphic];
Here's the code for the SKTexture with a gradient:
- (SKTexture *)returnHorizontalGradientofSize:(CGSize)size
                                    leftColor:(CIColor *)leftColor
                                   rightColor:(CIColor *)rightColor
{
    // Note: a new CIContext is created on every call; contexts are expensive
    // and are usually created once and reused.
    CIContext *coreImageContext = [CIContext contextWithOptions:nil];
    CIFilter *gradientFilter = [CIFilter filterWithName:@"CILinearGradient"];
    [gradientFilter setDefaults];
    CIVector *startVector = [CIVector vectorWithX:0 Y:size.height/2];
    CIVector *endVector = [CIVector vectorWithX:size.width Y:size.height/2];
    [gradientFilter setValue:startVector forKey:@"inputPoint0"];
    [gradientFilter setValue:endVector forKey:@"inputPoint1"];
    [gradientFilter setValue:leftColor forKey:@"inputColor0"];
    [gradientFilter setValue:rightColor forKey:@"inputColor1"];
    CGImageRef cgimg = [coreImageContext createCGImage:[gradientFilter outputImage]
                                              fromRect:CGRectMake(0, 0, size.width, size.height)];
    UIImage *theImage = [UIImage imageWithCGImage:cgimg];
    CFRelease(cgimg);
    return [SKTexture textureWithImage:theImage];
}
Here's the code for the HUD item:
#import "ItemHud.h"
#import "TextureList.h"
#import "UnlockController.h"
@interface ItemHud ()
@property (nonatomic) double scoreIncrement;
@property (nonatomic) double increment;
@property (nonatomic) double barIncrement;
@property (nonatomic) double updateIncrement;
@property (nonatomic) BOOL barAnimating;
@end

@implementation ItemHud
@synthesize theLabel;
@synthesize theLabelTwo;
@synthesize animatedGraphic;
@synthesize iconGraphic;
-(id)initWithImageNamed:(NSString *)ImageName
withLabel:(NSString *)LabelName
withLabelTwo:(NSString *)LabelNameTwo
withIconGraphic:(NSString *)iconGraphicName
withAnimatedGraphic:(BOOL)AnimatedGraphicName{
if (self = [super init]) {
if (ImageName)
{
self.size = [[TextureList sharedManager]returnTextureSize:ImageName];
self.texture = nil;
self.color = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.65];
self.userInteractionEnabled = NO;
_barAnimating = NO;
}
if (AnimatedGraphicName) {
    animatedGraphic = [[SKSpriteNode alloc] initWithTexture:[[TextureList sharedManager] returnGradientofSize:[[TextureList sharedManager] returnTextureSize:kGMHUDFlowerTarget]
                                                                                                     topColor:[CIColor colorWithRed:255.0/255.0 green:171.0/255.0 blue:121.0/255.0]
                                                                                                  bottomColor:[CIColor colorWithRed:225.0/255.0 green:57.0/255.0 blue:86.0/255.0]]
                                                      color:[UIColor orangeColor]
                                                       size:CGSizeMake(0, self.frame.size.height)];
    animatedGraphic.anchorPoint = CGPointMake(0, 0.5);
    animatedGraphic.zPosition = self.zPosition + 1;
    [self addChild:animatedGraphic];
}
if (iconGraphicName) {
    NSArray *levelIcons = @[kGMHUDLevelIcon1, kGMHUDLevelIcon2, kGMHUDLevelIcon3,
                            kGMHUDLevelIcon4, kGMHUDLevelIcon5, kGMHUDLevelIcon6,
                            kGMHUDLevelIcon7, kGMHUDLevelIcon8, kGMHUDLevelIcon9];
    if ([levelIcons containsObject:iconGraphicName]) {
        iconGraphic = [[SKSpriteNode alloc] initWithTexture:[SKTexture textureWithImageNamed:iconGraphicName]
                                                      color:[UIColor whiteColor] // initWithTexture:color:size: expects a non-nil color
                                                       size:[[TextureList sharedManager] returnTextureSize:kGMHUDLevelIcon1]];
    }
    else {
        iconGraphic = [[SKSpriteNode alloc] initWithTexture:[SKTexture textureWithImageNamed:iconGraphicName]
                                                      color:[UIColor whiteColor]
                                                       size:[[TextureList sharedManager] returnTextureSize:iconGraphicName]];
    }
    iconGraphic.zPosition = self.zPosition + 1;
    [self addChild:iconGraphic];
    [self setGraphicRight:NO];
}
if (LabelName) {
theLabel = [SKLabelNode labelNodeWithFontNamed:kFontName];
[theLabel setFontColor:[UIColor whiteColor]];
[theLabel setFontName:kFontName];
[theLabel setFontSize:kFontSizeMDMedium];
[theLabel setHorizontalAlignmentMode:SKLabelHorizontalAlignmentModeLeft];
[theLabel setVerticalAlignmentMode:SKLabelVerticalAlignmentModeCenter];
theLabel.text = LabelName;
[self addChild:theLabel];
[self setHudDefaults:YES];
}
if (LabelNameTwo) {
theLabelTwo = [SKLabelNode labelNodeWithFontNamed:kFontName];
[theLabelTwo setFontColor:[UIColor whiteColor]];
[theLabelTwo setFontName:kFontName];
[theLabelTwo setFontSize:kFontSizeMDMedium];
[theLabelTwo setHorizontalAlignmentMode:SKLabelHorizontalAlignmentModeRight];
[theLabelTwo setVerticalAlignmentMode:SKLabelVerticalAlignmentModeCenter];
theLabelTwo.text = LabelNameTwo;
[self addChild:theLabelTwo];
[self setHudDefaults:NO];
}
}
return self;
}
-(void)setBackgroundImage:(SKTexture*)theTexture{
self.texture = theTexture;
}
-(void)setHudDefaults:(BOOL)singleLabel{
theLabelTwo.position = CGPointMake(self.position.x+self.frame.size.width/2,self.position.y);
animatedGraphic.position = CGPointMake(-self.frame.size.width/2,self.position.y);
if (singleLabel) {
[theLabel setHorizontalAlignmentMode:SKLabelHorizontalAlignmentModeCenter];
theLabel.position = CGPointMake(self.position.x,self.position.y);
}
else{
theLabel.position = CGPointMake(theLabelTwo.position.x-theLabelTwo.frame.size.width/2-20,self.position.y);
[theLabel setHorizontalAlignmentMode:SKLabelHorizontalAlignmentModeRight];
}
theLabel.zPosition = self.zPosition+1;
theLabelTwo.zPosition = self.zPosition+1;
}
-(void)setGraphicRight:(BOOL)placeRight{
if (placeRight) {
iconGraphic.position = CGPointMake(self.frame.size.width/2,-self.frame.size.height/4);
iconGraphic.zPosition = animatedGraphic.zPosition+1;
}
else{
iconGraphic.position = CGPointMake(-self.frame.size.width/2,-self.frame.size.height/4);
iconGraphic.zPosition = animatedGraphic.zPosition+1;
}
}
- (void)setBarProgress:(int)flowerTarget currentFlowers:(int)currentFlowers
{
    double increment = (double)flowerTarget/100;
    //NSLog(@"increment is %f", increment);
    double barIncrement = (double)self.frame.size.width/100;
    //NSLog(@"BAR increment is %f", barIncrement);
    double barState = (barIncrement/increment)*currentFlowers;
    //NSLog(@"BAR state is %f", barState);
    /*if (animatedGraphic.frame.size.width >= self.frame.size.width && !_barAnimating) {
        _barAnimating = YES;
        [self animateBar:YES];
    }
    else if (animatedGraphic.frame.size.width < self.frame.size.width && _barAnimating) {
        _barAnimating = NO;
        [self animateBar:NO];
    }*/
    animatedGraphic.size = CGSizeMake(barState, self.frame.size.height);
}
- (void)setBarValues:(int)startValue increment:(int)increment nextObject:(int)nextObject
{
    //NSLog(@"0:Totalscore is %i", [[UserDetails sharedManager] userTotalScore]);
    //NSLog(@"1:StartValue %i", startValue);
    //NSLog(@"2:Increment %i", increment);
    //NSLog(@"3:Nextobject %i", nextObject);
    _scoreIncrement = (double)startValue/(double)nextObject;
    _barIncrement = (double)self.frame.size.width/100;
    //NSLog(@"bar increment is %f", _barIncrement);
    _updateIncrement = ((double)startValue/_scoreIncrement)/100;
    //NSLog(@"update increment is %f", _updateIncrement);
    //NSLog(@"4:Animate %f", _barIncrement/_updateIncrement*increment);
    animatedGraphic.size = CGSizeMake(_barIncrement/_updateIncrement*increment, self.frame.size.height);
}
-(void)updateBarProgress:(int)update{
animatedGraphic.size = CGSizeMake(_barIncrement/_updateIncrement*update, self.frame.size.height);
//hudFx.position = CGPointMake(animatedGraphic.frame.size.width-2, animatedGraphic.position.y);
}
- (void)setBarValues:(int)startValue nextObject:(int)nextObject animated:(BOOL)animated
{
    // start value is the difference between the unlock score and the current value
    // next object is the score needed to unlock the item
    // all unlocks done
    if ([[UnlockController sharedManager] allunlocksOpen]) {
        theLabel.text = @"ALL ITEMS UNLOCKED";
        return;
    }
    __block int count = 0;
    double increment = (double)startValue/(double)nextObject;
    //NSLog(@"increment is %f", increment);
    double countUp = nextObject - startValue;
    //NSLog(@"countup is %f", countUp);
    double barIncrement = (double)self.frame.size.width/100;
    //NSLog(@"bar increment is %f", barIncrement);
    double updateIncrement = ((double)startValue/increment)/100;
    //NSLog(@"update increment is %f", updateIncrement);
    if (!animated) {
        animatedGraphic.size = CGSizeMake(barIncrement/updateIncrement*startValue, self.frame.size.height);
        //hudFx.position = CGPointMake(animatedGraphic.frame.size.width-2, animatedGraphic.position.y);
    }
    else {
        SKAction *delay = [SKAction waitForDuration:0.0];
        SKAction *animateCount = [SKAction runBlock:^{
            count++;
            animatedGraphic.size = CGSizeMake(barIncrement*count, self.frame.size.height);
            //hudFx.position = CGPointMake(animatedGraphic.frame.size.width-2, animatedGraphic.position.y);
        }];
        SKAction *animateSequence = [SKAction sequence:@[animateCount, delay]];
        SKAction *repeatSequence = [SKAction repeatAction:animateSequence count:(double)countUp/updateIncrement];
        [animatedGraphic runAction:repeatSequence completion:^{
        }];
    }
}
-(void)animateBar:(BOOL)animate{
SKAction *delay = [SKAction waitForDuration:0.15];
SKAction *changeToAnimateBar = [SKAction runBlock:^{
animatedGraphic.texture = [[TextureList sharedManager]returnGradientofSize:[[TextureList sharedManager]returnTextureSize:kGMHUDFlowerTarget] topColor:[CIColor colorWithRed:255.0/255.0 green:244.0/255.0 blue:155.0/255.0] bottomColor:[CIColor colorWithRed:225.0/255.0 green:57.0/255.0 blue:86.0/255.0]];
}];
SKAction *changeToDefaultBar = [SKAction runBlock:^{
animatedGraphic.texture = [[TextureList sharedManager]returnGradientofSize:[[TextureList sharedManager]returnTextureSize:kGMHUDFlowerTarget] topColor:[CIColor colorWithRed:255.0/255.0 green:171.0/255.0 blue:121.0/255.0] bottomColor:[CIColor colorWithRed:225.0/255.0 green:57.0/255.0 blue:86.0/255.0]];
}];
SKAction *animateSequence = [SKAction sequence:@[changeToAnimateBar, delay, changeToDefaultBar, delay]];
SKAction *animatingBarLoop = [SKAction repeatActionForever:animateSequence];
if (animate) {
    [self runAction:animatingBarLoop withKey:@"animatingBar"];
}
else {
    [self removeActionForKey:@"animatingBar"];
    [self runAction:changeToDefaultBar withKey:@"defaultBar"];
}
}
This turned out to be an issue with the AGSpriteButton class, which was hogging memory and eventually caused a crash when an advert loaded. You can find a fix here:
SKScene Fails to deallocate memory resulting in bounded memory growth
In my project, I use AVAudioSession to detect whether headphones are plugged in or unplugged. But I can't detect when a Bluetooth device is connected. Here is my code for the headphone state:
- (void)audioRouteChangeListenerCallback:(NSNotification *)notification
{
    NSDictionary *interruptionDict = notification.userInfo;
    NSInteger routeChangeReason = [[interruptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue];
    switch (routeChangeReason) {
        case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
            //NSLog(@"AVAudioSessionRouteChangeReasonNewDeviceAvailable");
            NSLog(@"Headphone/Line plugged in");
            [_soundButtonOutlet setImage:[UIImage imageNamed:@"sound-on.png"] forState:UIControlStateNormal];
            _headSetState = YES;
            break;
        case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
            NSLog(@"AVAudioSessionRouteChangeReasonOldDeviceUnavailable");
            NSLog(@"Headphone/Line was pulled. Stopping player....");
            [_soundButtonOutlet setImage:[UIImage imageNamed:@"sound-off.png"] forState:UIControlStateNormal];
            if (_isPlaying == YES)
            {
                [self.player pause];
                [_audioButtonOutlet setImage:[UIImage imageNamed:@"play.png"] forState:UIControlStateNormal];
                _isPlaying = NO;
            }
            _headSetState = NO;
            break;
        case AVAudioSessionRouteChangeReasonCategoryChange:
            // called at start - also when other audio wants to play
            NSLog(@"AVAudioSessionRouteChangeReasonCategoryChange");
            break;
    }
}

- (BOOL)isHeadsetPluggedIn
{
    AVAudioSessionRouteDescription *route = [[AVAudioSession sharedInstance] currentRoute];
    for (AVAudioSessionPortDescription *desc in [route outputs]) {
        if ([[desc portType] isEqualToString:AVAudioSessionPortHeadphones])
        {
            [_soundButtonOutlet setImage:[UIImage imageNamed:@"sound-on.png"] forState:UIControlStateNormal];
            _headSetState = YES;
            return YES;
        }
    }
    // No headphone port found on the current route.
    [_soundButtonOutlet setImage:[UIImage imageNamed:@"sound-off.png"] forState:UIControlStateNormal];
    _headSetState = NO;
    return NO;
}
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    [AVAudioSession sharedInstance];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(audioRouteChangeListenerCallback:)
                                                 name:AVAudioSessionRouteChangeNotification
                                               object:nil];
    [self isHeadsetPluggedIn];
}
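For completeness, the route-change observer is removed again when the view goes away (a minimal sketch; this counterpart method is assumed, it is not in the original code):
- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVAudioSessionRouteChangeNotification
                                                  object:nil];
}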
So how can I detect whether a Bluetooth headset is connected or not on iOS 8?
You can detect the currently active Bluetooth output devices (instead of the input devices).
Swift Code:
import AVFoundation
func bluetoothAudioConnected() -> Bool {
    let outputs = AVAudioSession.sharedInstance().currentRoute.outputs
    for output in outputs {
        if output.portType == AVAudioSessionPortBluetoothA2DP ||
           output.portType == AVAudioSessionPortBluetoothHFP ||
           output.portType == AVAudioSessionPortBluetoothLE {
            return true
        }
    }
    return false
}
The Bluetooth port types are taken from the following question: What's the difference among AVAudioSessionPortBluetoothHFP, A2DP and LE?
I hope it helps someone
Edit for Swift 5.1 (Thanks iago849 for the fix)
var bluetoothDeviceConnected: Bool {
!AVAudioSession.sharedInstance().currentRoute.outputs.compactMap {
($0.portType == .bluetoothA2DP ||
$0.portType == .bluetoothHFP ||
$0.portType == .bluetoothLE) ? true : nil
}.isEmpty
}
I was able to detect whether a bluetooth headset (HFP) device was currently connected using the following:
BOOL bHas = NO;
NSArray *arrayInputs = [[AVAudioSession sharedInstance] availableInputs];
for (AVAudioSessionPortDescription *port in arrayInputs)
{
    if ([port.portType isEqualToString:AVAudioSessionPortBluetoothHFP])
    {
        bHas = YES;
        break;
    }
}
However, your AVAudioSession category must be set as AVAudioSessionCategoryPlayAndRecord in order for this to work. If it isn't, the port will not show up in the list even if the HFP device is connected.
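For reference, that category setup looks roughly like this (a minimal sketch; the AVAudioSessionCategoryOptionAllowBluetooth option is an assumption here, it is what makes HFP routes visible to the session):
NSError *categoryError = nil;
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord
                                 withOptions:AVAudioSessionCategoryOptionAllowBluetooth
                                       error:&categoryError];
[[AVAudioSession sharedInstance] setActive:YES error:&categoryError];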
You can detect it with routeChangeNotification:
func activateHeadPhonesStatus(){
NotificationCenter.default.addObserver(self, selector: #selector(audioRouteChangeListener(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
}
@objc func audioRouteChangeListener(_ notification: Notification) {
guard let userInfo = notification.userInfo,
let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
let reason = AVAudioSession.RouteChangeReason(rawValue:reasonValue) else {
return
}
if reason == .newDeviceAvailable {
let session = AVAudioSession.sharedInstance()
for output in session.currentRoute.outputs where output.portType == AVAudioSession.Port.bluetoothA2DP {
print("Bluetooth Headphone Connected")
break
}
}
}
I have a UIWebView that loads text from an HTML string.
When the user selects part of the text and presses a button, I need to be able to extract the selection so I can use it elsewhere, so I am using this code:
// The JS file
NSString *filePath = [[NSBundle mainBundle] pathForResource:@"HighlightedString" ofType:@"js" inDirectory:@""];
NSData *fileData = [NSData dataWithContentsOfFile:filePath];
NSString *jsString = [[NSMutableString alloc] initWithData:fileData encoding:NSUTF8StringEncoding];
[WebV2 stringByEvaluatingJavaScriptFromString:jsString];

// The JS function
NSString *startSearch = [NSString stringWithFormat:@"getHighlightedString()"];
[WebV2 stringByEvaluatingJavaScriptFromString:startSearch];

NSString *selectedText = [NSString stringWithFormat:@"selectedText"];
NSString *highlightedString = [WebV2 stringByEvaluatingJavaScriptFromString:selectedText];

UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Highlighted String"
                                                message:highlightedString
                                               delegate:nil
                                      cancelButtonTitle:@"Oh Yeah"
                                      otherButtonTitles:nil];
[alert show];
Along with HighlightedString.js :
/*!
------------------------------------------------------------------------
// Search Highlighted String
------------------------------------------------------------------------
*/
var selectedText = "";
function getHighlightedString() {
var text = window.getSelection();
selectedText = text.anchorNode.textContent.substr(text.anchorOffset, text.focusOffset - text.anchorOffset);
}
// ...
function stylizeHighlightedString() {
var range = window.getSelection().getRangeAt(0);
var selectionContents = range.extractContents();
var span = document.createElement("span");
span.appendChild(selectionContents);
span.setAttribute("class","uiWebviewHighlight");
span.style.backgroundColor = "black";
span.style.color = "white";
range.insertNode(span);
}
// helper function, recursively removes the highlights in elements and their children
function uiWebview_RemoveAllHighlightsForElement(element) {
if (element) {
if (element.nodeType == 1) {
if (element.getAttribute("class") == "uiWebviewHighlight") {
var text = element.removeChild(element.firstChild);
element.parentNode.insertBefore(text,element);
element.parentNode.removeChild(element);
return true;
} else {
var normalize = false;
for (var i=element.childNodes.length-1; i>=0; i--) {
if (uiWebview_RemoveAllHighlightsForElement(element.childNodes[i])) {
normalize = true;
}
}
if (normalize) {
element.normalize();
}
}
}
}
return false;
}
// the main entry point to remove the highlights
function uiWebview_RemoveAllHighlights() {
selectedText = "";
uiWebview_RemoveAllHighlightsForElement(document.body);
}
I always get nothing as a result; the alert view shows nothing. What's the problem with this code? Any help or ideas would be really appreciated.
The solution was actually pretty simple, and there was no need for all the code above.
For any future users, just use:
NSString *textToSpeech = [WebV2 stringByEvaluatingJavaScriptFromString:@"window.getSelection().toString()"];
NSLog(@" -**-*--****-*---**--*-* This is the new selected text %@", textToSpeech);
NSString *theSelectedText = [self.webView stringByEvaluatingJavaScriptFromString:@"window.getSelection().toString()"];
This will pass your selection to the string variable.
My English is not very good, so please bear with me.
Here is my source code:
- (void)createBall:(CGPoint)touchedAt{
CGSize winSize = [CCDirector sharedDirector].winSize;
ball2 = [CCSprite spriteWithFile:@"Ball.png" rect:CGRectMake(0, 0, 54, 54)];
ball2.position = ccp(touchedAt.x,touchedAt.y);
[self addChild:ball2];
b2BodyDef ballBodyDef2;
ballBodyDef2.type = b2_dynamicBody;
ballBodyDef2.position.Set(touchedAt.x/PTM_RATIO, touchedAt.y/PTM_RATIO);
ballBodyDef2.userData = ball2;
b2Body *body2 = _world->CreateBody(&ballBodyDef2);
b2CircleShape circle;
circle.m_radius = 89.0/PTM_RATIO;//(arc4random()*26.0)/PTM_RATIO;
b2FixtureDef ballShapeDef2;
ballShapeDef2.shape = &circle;
ballShapeDef2.density = 1.0f;
ballShapeDef2.friction = 0.2f;
ballShapeDef2.restitution = 0.8f;
body2->CreateFixture(&ballShapeDef2);
}
-(void)createBall2
{
CGSize winSize = [CCDirector sharedDirector].winSize;
globalSprite = [CCSprite spriteWithFile:@"Ball.png"];
globalSprite.position = ccp(winSize.width/2 + globalSprite.contentSize.width, winSize.height/2);
[self addChild:globalSprite];
b2BodyDef ballBodyDef3;
ballBodyDef3.type = b2_dynamicBody;
ballBodyDef3.position.Set(100/PTM_RATIO, 100/PTM_RATIO);
ballBodyDef3.userData = globalSprite ;
b2Body *body3 = _world->CreateBody(&ballBodyDef3);
b2CircleShape circle;
circle.m_radius = 26.0/PTM_RATIO;//(arc4random()*26.0)/PTM_RATIO;
b2FixtureDef ballShapeDef3;
ballShapeDef3.shape = &circle;
ballShapeDef3.density = 1.0f;
ballShapeDef3.friction = 0.2f;
ballShapeDef3.restitution = 0.8f;
body3->CreateFixture(&ballShapeDef3);
}
// initialize your instance here
-(id) init
{
if( (self=[super init])) {
// enable touch
// enable accelerometer
CGSize winSize = [CCDirector sharedDirector].winSize;
self.isAccelerometerEnabled = YES;
self.isTouchEnabled = YES;
// Create sprite and add it to the layer
// Create a world
b2Vec2 gravity = b2Vec2(0.0f, 0.0f);
bool doSleep = true;
_world = new b2World(gravity, doSleep);
// Create edges around the entire screen
b2BodyDef groundBodyDef;
groundBodyDef.position.Set(0,0);
b2Body *groundBody = _world->CreateBody(&groundBodyDef);
b2PolygonShape groundBox;
b2FixtureDef boxShapeDef;
boxShapeDef.shape = &groundBox;
groundBox.SetAsEdge(b2Vec2(0,0), b2Vec2(winSize.width/PTM_RATIO, 0));
groundBody->CreateFixture(&boxShapeDef);
groundBox.SetAsEdge(b2Vec2(0,0), b2Vec2(0, winSize.height/PTM_RATIO));
groundBody->CreateFixture(&boxShapeDef);
groundBox.SetAsEdge(b2Vec2(0, winSize.height/PTM_RATIO), b2Vec2(winSize.width/PTM_RATIO, winSize.height/PTM_RATIO));
groundBody->CreateFixture(&boxShapeDef);
groundBox.SetAsEdge(b2Vec2(winSize.width/PTM_RATIO, winSize.height/PTM_RATIO), b2Vec2(winSize.width/PTM_RATIO, 0));
groundBody->CreateFixture(&boxShapeDef);
// Create ball body and shape
[self schedule:@selector(tick:)];
//[self schedule:@selector(gameLogic:) interval:1.0];
[self createBall2];
}
return self;
}
- (void)ccTouchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
// Choose one of the touches to work with
UITouch *touch = [touches anyObject];
CGPoint location = [touch locationInView:[touch view]];
location = [[CCDirector sharedDirector] convertToGL:location];
[self createBall:location];
}
- (void)tick:(ccTime) dt {
_world->Step(dt, 10, 10);
for(b2Body *b = _world->GetBodyList(); b; b=b->GetNext()) {
if (b->GetUserData() != NULL) {
CCSprite *ballData = (CCSprite *)b->GetUserData();
ballData.position = ccp(b->GetPosition().x * PTM_RATIO,
b->GetPosition().y * PTM_RATIO);
ballData.rotation = -1 * CC_RADIANS_TO_DEGREES(b->GetAngle());
}
}
}
What I want is: touch -> create a sprite (circle) -> scale the sprite -> remove the sprite.
But when I remove a sprite, the
- (void)tick:(ccTime)dt
method crashes the simulator.
What is the right way to do this?
Try this (note that DestroyBody takes the b2Body*, not the sprite):
_world->DestroyBody(body);
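Bodies must also not be destroyed while iterating the world's body list in tick:. A minimal sketch (the kShouldRemoveTag removal condition is hypothetical) that collects bodies first and destroys them after the loop:
// At the top of the file:
#include <vector>

// Collect bodies to destroy while iterating; destroy them afterwards.
std::vector<b2Body *> toDestroy;
for (b2Body *b = _world->GetBodyList(); b; b = b->GetNext()) {
    CCSprite *sprite = (CCSprite *)b->GetUserData();
    if (sprite && sprite.tag == kShouldRemoveTag) { // hypothetical removal condition
        toDestroy.push_back(b);
    }
}
for (size_t i = 0; i < toDestroy.size(); ++i) {
    b2Body *b = toDestroy[i];
    CCSprite *sprite = (CCSprite *)b->GetUserData();
    if (sprite) [sprite removeFromParentAndCleanup:YES];
    _world->DestroyBody(b); // also frees the body's fixtures
}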