Copy item from iPod Library - xamarin.ios

I'm trying to copy an item from the iPod library to my local storage space, for later playback. I've got the item URL, but it's in the form ipod-library://item/item.mp3?id=2398084975506389321 — any idea how to access the actual file?
Thanks,
Rick

This will work: https://gist.github.com/3304992
- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection
{
    NSString *tempPath = NSTemporaryDirectory();
    int i = 1;
    for (MPMediaItem *theItem in mediaItemCollection.items) {
        NSURL *url = [theItem valueForProperty:MPMediaItemPropertyAssetURL];
        AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:url options:nil];
        // Passthrough keeps the original audio data; the output is a Core Audio (.caf) container
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:songAsset presetName:AVAssetExportPresetPassthrough];
        exporter.outputFileType = @"com.apple.coreaudio-format";
        NSString *fname = [[NSString stringWithFormat:@"%d", i] stringByAppendingString:@".caf"];
        ++i;
        NSString *exportFile = [tempPath stringByAppendingPathComponent:fname];
        exporter.outputURL = [NSURL fileURLWithPath:exportFile];
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            // Code for completion handler
        }];
    }
    [mediaPicker dismissViewControllerAnimated:YES completion:nil];
}
Use MPMediaPickerController to pick the media.
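For completeness, the completion handler should check the export status before the file is used; a minimal sketch of what could go in that block:
[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Exported to %@", exporter.outputURL);
    } else {
        NSLog(@"Export failed: %@", exporter.error);
    }
}];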

This is how I'm doing it in Objective-C:
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudio.h>

// or [NSURL URLWithString:@"ipod-library://item/item.mp3?id=2398084975506389321"]
NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
NSMutableData *data = [[NSMutableData alloc] init];

const uint32_t sampleRate = 16000;
const uint16_t bitDepth = 16;
const uint16_t channels = 2;

NSDictionary *opts = [NSDictionary dictionary];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetURL options:opts];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:NULL];

// Decode to interleaved 16-bit little-endian integer PCM
NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                          [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                          [NSNumber numberWithFloat:(float)sampleRate], AVSampleRateKey,
                          [NSNumber numberWithInt:bitDepth], AVLinearPCMBitDepthKey,
                          [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                          [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                          [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                          nil];

AVAssetReaderTrackOutput *output = [[AVAssetReaderTrackOutput alloc] initWithTrack:[[asset tracks] objectAtIndex:0] outputSettings:settings];
[asset release]; // the reader retains the asset
[reader addOutput:output];
[reader startReading];

// Read the samples from the asset and append them subsequently;
// copyNextSampleBuffer returns NULL once reading completes or fails,
// so this loop cannot spin forever on a failed reader
CMSampleBufferRef buffer;
while ((buffer = [output copyNextSampleBuffer]) != NULL) {
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(buffer);
    size_t size = CMBlockBufferGetDataLength(blockBuffer);
    uint8_t *outBytes = malloc(size);
    CMBlockBufferCopyDataBytes(blockBuffer, 0, size, outBytes);
    CMSampleBufferInvalidate(buffer);
    CFRelease(buffer);
    [data appendBytes:outBytes length:size];
    free(outBytes);
}
[output release];
Here, data will contain the raw PCM data of the track. Please note that you cannot directly access the file of a song or video, only its data through this method. You can compress it using e.g. FLAC (that's how I'm processing it in my tweak).
Since MonoTouch has a 1:1 mapping to Objective-C class and method names, this should be fairly easy to copy over. :)
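If the goal is later playback (as in the original question), the raw PCM can be wrapped in a standard WAV header before writing it out. A minimal sketch, assuming the 16 kHz / 16-bit / stereo settings above; the helper name and outPath are hypothetical:
// Hypothetical helper: prepends a canonical 44-byte WAV header to raw PCM.
// WAV fields are little-endian, which matches ARM/iOS, so they can be appended directly.
- (NSData *)wavDataFromPCM:(NSData *)pcm
                sampleRate:(uint32_t)sampleRate
                  channels:(uint16_t)channels
                  bitDepth:(uint16_t)bitDepth
{
    uint32_t dataLen = (uint32_t)[pcm length];
    uint32_t riffLen = 36 + dataLen;   // RIFF chunk size: header remainder + data
    uint32_t fmtLen = 16;              // size of the "fmt " chunk
    uint16_t pcmFormat = 1;            // 1 = linear PCM
    uint32_t byteRate = sampleRate * channels * bitDepth / 8;
    uint16_t blockAlign = channels * bitDepth / 8;

    NSMutableData *wav = [NSMutableData dataWithCapacity:44 + dataLen];
    [wav appendBytes:"RIFF" length:4];
    [wav appendBytes:&riffLen length:4];
    [wav appendBytes:"WAVE" length:4];
    [wav appendBytes:"fmt " length:4];
    [wav appendBytes:&fmtLen length:4];
    [wav appendBytes:&pcmFormat length:2];
    [wav appendBytes:&channels length:2];
    [wav appendBytes:&sampleRate length:4];
    [wav appendBytes:&byteRate length:4];
    [wav appendBytes:&blockAlign length:2];
    [wav appendBytes:&bitDepth length:2];
    [wav appendBytes:"data" length:4];
    [wav appendBytes:&dataLen length:4];
    [wav appendData:pcm];
    return wav;
}
// Usage with the values from the snippet above:
// [[self wavDataFromPCM:data sampleRate:16000 channels:2 bitDepth:16] writeToFile:outPath atomically:YES];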

Related

Implementation of videos in video with blur effect in Objective-C

I have to make a video editor with a blur effect on video.
Can someone please point me to some useful links, or to the way this task should be approached? I have tried overlapping videos, but it doesn't center the videos exactly.
- (void)overlapVideos {
    AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BearVideo" ofType:@"mp4"]] options:nil];
    AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BearVideo" ofType:@"mp4"]] options:nil];
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
    CGAffineTransform Scale = CGAffineTransformMakeScale(0.6f, 0.6f);
    CGAffineTransform Move = CGAffineTransformMakeTranslation(140, 20);
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale, Move) atTime:kCMTimeZero];
    AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
    CGAffineTransform SecondScale = CGAffineTransformMakeScale(0.9f, 0.9f);
    CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0, 0);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale, SecondMove) atTime:kCMTimeZero];
    instruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = CGSizeMake(1280, 720);
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideo.mov"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    }
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    [exporter setVideoComposition:videoComposition];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
}
- (void)exportDidFinish:(AVAssetExportSession *)session
{
    NSURL *outputURL = session.outputURL;
    if (self.videodelegateObj != nil) {
        [_videodelegateObj videoOverlappingFinished:outputURL];
    }
}
- (void)applyBlurOnAsset:(AVAsset *)asset Completion:(void (^)(BOOL success, NSError *error, NSURL *videoUrl))completion {
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    AVVideoComposition *composition = [AVVideoComposition videoCompositionWithAsset:asset
        applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request) {
            // Clamp to avoid blurring transparent pixels at the image edges
            CIImage *source = [request.sourceImage imageByClampingToExtent];
            [filter setValue:source forKey:kCIInputImageKey];
            [filter setValue:[NSNumber numberWithDouble:10.0] forKey:kCIInputRadiusKey];
            CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent];
            [request finishWithImage:output context:nil];
        }];
    NSURL *outputUrl = [[NSURL alloc] initWithString:@"Your Output path"];
    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset960x540];
    exporter.videoComposition = composition;
    exporter.outputFileType = AVFileTypeMPEG4;
    if (outputUrl) {
        exporter.outputURL = outputUrl;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, [exporter error], nil);
                        });
                        return;
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"crop Export canceled");
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, nil, nil);
                        });
                        return;
                    }
                    break;
                default:
                    break;
            }
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES, nil, outputUrl);
                });
            }
        }];
    }
}
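For reference, the blur method above would be driven along these lines (a sketch using the method's own signature; videoURL is a placeholder for the source file):
AVURLAsset *assetToBlur = [AVURLAsset URLAssetWithURL:videoURL options:nil];
[self applyBlurOnAsset:assetToBlur Completion:^(BOOL success, NSError *error, NSURL *videoUrl) {
    if (success) {
        NSLog(@"Blurred video written to %@", videoUrl);
    } else {
        NSLog(@"Blur failed: %@", error);
    }
}];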
Kindly give some guidance. Any help in this direction would be highly appreciated. Thanks in advance.

downloaded video - AFHTTPRequestOperation vs. NSURLSessionDownloadTask

I'm trying to update my existing download model, so I have replaced my old code:
AFHTTPRequestOperation *downloadRequest = [[AFHTTPRequestOperation alloc] initWithRequest:request];
[downloadRequest setCompletionBlockWithSuccess:^(AFHTTPRequestOperation *operation, id responseObject) {
    NSData *data = [[NSData alloc] initWithData:responseObject];
    [data writeToFile:video2Save.localFilePath atomically:YES];
    video2Save.downloadComplete = YES;
    [YEPersistentModelHelper saveData:_downloadVideos ToDiskWithIdentifier:persistentIdDownloadedVideos];
    NSLog(@"file downloading complete : %@", video2Save.localFilePath);
} failure:^(AFHTTPRequestOperation *operation, NSError *error) {
    NSLog(@"file downloading error : %@", [error localizedDescription]);
}];
[downloadRequest start];
with the following:
NSURLSessionDownloadTask *downloadTask = [_sessionManager downloadTaskWithRequest:request progress:&progress destination:^NSURL *(NSURL *targetPath, NSURLResponse *response) {
    NSURL *documentsDirectoryURL = [[NSFileManager defaultManager] URLForDirectory:NSDocumentDirectory inDomain:NSUserDomainMask appropriateForURL:nil create:NO error:nil];
    return [documentsDirectoryURL URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", video2Save.videoVersionId]];
} completionHandler:^(NSURLResponse *response, NSURL *filePath, NSError *error) {
    NSLog(@"File downloaded to: %@", filePath);
    video2Save.localFilePath = [[filePath filePathURL] absoluteString];
    video2Save.downloadComplete = YES;
    [YEPersistentModelHelper saveData:_downloadVideos ToDiskWithIdentifier:persistentIdDownloadedVideos];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *err = nil;
    NSDictionary *att = [fileManager attributesOfItemAtPath:video2Save.localFilePath error:&err];
    NSLog(@"NSDictionary: %@", att);
}];
[downloadTask resume];
And it seems to work fine: the completion block is executed and the file exists at the traced target.
The problem is that I am no longer able to play the video! I use MPMoviePlayerController, which throws this useful error:
_itemFailedToPlayToEnd: { kind = 1; new = 2; old = 0; }
The only difference seems to be the file permissions. The first one adds a "staff" group and everyone is allowed to read, while the second only grants access to "me". But even if I change it in the Finder, I am not able to play it...
Does anyone have an idea?
To save the file location, use path, not absoluteString. That is, instead of
video2Save.localFilePath = [[filePath filePathURL] absoluteString];
store the path. Don't call absoluteString when playing either; just use the path. For example, to build the URL for the video:
NSURL *FilePathURL = [NSURL fileURLWithPath:[docDir stringByAppendingPathComponent:fileToCheck]];
and then hand [FilePathURL path] to your video class.
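Concretely, in the completion handler from the question, the fix would look like this (a sketch against the question's own code):
// store the plain filesystem path, not a URL string
video2Save.localFilePath = [filePath path];
// ... later, rebuild a file URL for playback:
NSURL *playURL = [NSURL fileURLWithPath:video2Save.localFilePath];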

Find assets in library - add to an AVMutableComposition - export = crash

I've been struggling with adding assets from the iPhone photo library to an AVMutableComposition and then exporting them. Here is what I've got.
Finding the assets (here I grab the AVURLAsset):
- (void)findAssets {
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    // Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos.
    [library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
        // Within the group enumeration block, filter to enumerate just videos.
        [group setAssetsFilter:[ALAssetsFilter allVideos]];
        [group enumerateAssetsUsingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) {
            // The end of the enumeration is signaled by asset == nil.
            if (alAsset) {
                ALAssetRepresentation *representation = [alAsset defaultRepresentation];
                NSURL *url = [representation url];
                AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
                // Do something interesting with the AV asset.
                [thumbs addObject:alAsset];
                [assets addObject:avAsset];
            } else if (alAsset == nil) {
                [self createScroll];
            }
        }];
    }
    failureBlock:^(NSError *error) {
        // Typically you should handle an error more gracefully than this.
        NSLog(@"No groups");
    }];
    [library release];
}
Here I add an asset to my composition (I use the first object in the array for testing only):
- (void)addToCompositionWithAsset:(AVURLAsset *)_asset {
    NSError *editError = nil;
    composition = [AVMutableComposition composition];
    AVURLAsset *sourceAsset = [assets objectAtIndex:0];
    Float64 inSeconds = 1.0;
    Float64 outSeconds = 2.0;
    // calculate time
    CMTime inTime = CMTimeMakeWithSeconds(inSeconds, 600);
    CMTime outTime = CMTimeMakeWithSeconds(outSeconds, 600);
    CMTime duration = CMTimeSubtract(outTime, inTime);
    CMTimeRange editRange = CMTimeRangeMake(inTime, duration);
    [composition insertTimeRange:editRange ofAsset:sourceAsset atTime:composition.duration error:&editError];
    if (!editError) {
        CMTimeGetSeconds(composition.duration);
    }
}
And finally I export the composition, and here it crashes:
- (void)exportComposition {
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
    NSLog(@"can export: %@", exportSession.supportedFileTypes);
    NSArray *dirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectoryPath = [dirs objectAtIndex:0];
    NSString *exportPath = [documentsDirectoryPath stringByAppendingPathComponent:EXPORT_NAME];
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    NSURL *exportURL = [NSURL fileURLWithPath:exportPath];
    exportSession.outputURL = exportURL;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie; // @"com.apple.quicktime-movie"
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"in the export completion block, status is %d", exportSession.status);
        switch (exportSession.status) {
            case AVAssetExportSessionStatusFailed:
            case AVAssetExportSessionStatusCompleted: {
                [self performSelectorOnMainThread:@selector(exportDone:)
                                       withObject:nil
                                    waitUntilDone:NO];
                break;
            }
        };
    }];
}
Does anyone have an idea of what it might be? It crashes on
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
and I have tried different presets and outputFileTypes.
Thanks
* SOLVED *
I have to answer my own question now that I have solved it. It's amazing that I struggled with this for a whole day and then fixed it right after posting a question :)
I changed and moved:
composition = [AVMutableComposition composition];
to:
composition = [[AVMutableComposition alloc] init];
Under manual reference counting, the convenience constructor returns an autoreleased object, so the composition was deallocated before the export session ever used it; alloc/init gives you an owned reference. I think I was too tired when I was working on this yesterday. Thanks guys!
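Equivalently, one could keep the convenience constructor and hold on to the result through a retaining property; a sketch of that alternative (same fix, spelled differently):
// declared in the interface:
@property (nonatomic, retain) AVMutableComposition *composition;
// then in the editing method:
self.composition = [AVMutableComposition composition]; // the property retains it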

how to store file contents to iphone's internal memory using xcode?

How do I store file contents to the iPhone's internal memory using Xcode?
I want to store an XML file to the iPhone's internal memory.
You can hold the XML data in an NSData object and store the file in a private folder (available only to the application).
Below is the code for saving the file:
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
// aName is the file name to save under
NSString *path = [documentsDirectory stringByAppendingPathComponent:aName];
// xmlData is the NSData instance holding your XML (the original had a /*NSData*/ placeholder here)
[xmlData writeToFile:path atomically:YES];
// Note: never release [NSFileManager defaultManager] -- it is a shared singleton
[pool release];
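Reading the file back later is symmetrical; a short sketch (path built the same way as above):
NSData *loadedData = [NSData dataWithContentsOfFile:path];
if (loadedData == nil) {
    NSLog(@"No file at %@", path);
}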

create video from array of UIImages and save the video to iPhone library. AVAssetLibrary +AVFoundation

Problem in saving video to the iPhone library.
I have an array of UIImages and two buttons, "convertToVideo" and "saveToiPhoneLib":
- (IBAction)convertToVideo
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *savedVideoPath = [documentsDirectory stringByAppendingPathComponent:@"videoOutput"];
    printf(" \n\n\n-Video file == %s--\n\n\n", [savedVideoPath UTF8String]);
    [self writeImageAsMovie:imageArray toPath:savedVideoPath size:self.view.frame.size duration:3];
}
Here I'm passing the imageArray and savedVideoPath to the function below:
- (void)writeImageAsMovie:(NSArray *)array toPath:(NSString *)path size:(CGSize)size duration:(int)duration
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings] retain];
    // NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];
    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;
    // Convert UIImage to CGImage.
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    // Write samples:
    // ...
    // Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}
Generating a CVPixelBufferRef here:
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, self.view.frame.size.width,
                                          self.view.frame.size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, self.view.frame.size.width,
                                                 self.view.frame.size.height, 8, 4 * self.view.frame.size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
Saving to the iPhone library:
- (IBAction)saveToiPhoneLib
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *getImagePath = [basePath stringByAppendingPathComponent:@"videoOutput"];
    printf(" \n\n\n-Video file == %s--\n\n\n", [getImagePath UTF8String]);
    UISaveVideoAtPathToSavedPhotosAlbum(getImagePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    NSLog(@"Finished saving video with error: %@", error);
}
But while saving I'm getting this error message:
Finished saving video with error: Error Domain=ALAssetsLibraryErrorDomain Code=-3302 "Invalid data" UserInfo=0x1d59f0 {NSLocalizedFailureReason=There was a problem writing this asset because the data is invalid and cannot be viewed or played., NSLocalizedRecoverySuggestion=Try with different data, NSLocalizedDescription=Invalid data}
Please let me know my mistake. Thanks in advance.
- (void)convertimagetoVideo
{
    ///////////// setup OR function def if we move this to a separate function ////////////
    // this should be moved to its own function, that can take an imageArray, videoOutputPath, etc...
    NSError *error = nil;
    // set up file manager, and file videoOutputPath, remove "test_output.mp4" if it exists...
    // NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4";
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    // get rid of existing mp4 if exists...
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);
    CGSize imageSize = CGSizeMake(400, 200);
    NSUInteger fps = 30;
    // imageArray = [[NSMutableArray alloc] initWithObjects:@"download.jpeg", @"download2.jpeg", nil];
    NSMutableArray *imageArray;
    NSArray *imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
    imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
    for (NSString *path in imagePaths)
    {
        [imageArray addObject:[UIImage imageWithContentsOfFile:path]];
        // NSLog(@"-->image path= %@", path);
    }
    NSLog(@"-->imageArray.count= %i", (int)imageArray.count); // logged after the array is filled
    ////////////// end setup ///////////////////////////////////
    NSLog(@"Start building video from defined frames.");
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];
    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    double numberOfSecondsPerFrame = 6;
    double frameDuration = fps * numberOfSecondsPerFrame;
    NSLog(@"**************************************************");
    for (UIImage *img in imageArray)
    {
        buffer = [self pixelBufferFromCGImage:[img CGImage]];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                // print out status:
                NSLog(@"Processing video frame (%d,%d)", frameCount, (int)[imageArray count]);
                CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                NSLog(@"seconds = %f, %u, %d", CMTimeGetSeconds(frameTime), (unsigned)fps, j);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *error = videoWriter.error;
                    if (error != nil) {
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                    }
                }
            }
            else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n, with error.", frameCount, j);
        }
        frameCount++;
    }
    NSLog(@"**************************************************");
    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"Write Ended");
}
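// Note on the timing math in the loop above: CMTimeMake(value, timescale)
// represents value/timescale seconds. With fps = 30 and numberOfSecondsPerFrame = 6,
// frameDuration = 180, so frame N is presented at (N * 180) / 30 = N * 6 seconds.
// For example, CMTimeGetSeconds(CMTimeMake(2 * 180, 30)) evaluates to 12.0.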
- (void)CompileFilestomakeVideo
{
    // set up file videoOutputPath; this is the video file that was just written above
    // NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4";
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    // audio input file...
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
    // create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    // nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    // _assetExport.outputFileType = @"public.mpeg-4";
    // NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        [self saveVideoToAlbum:outputFilePath];
    }];
    ///// THAT IS IT DONE... the final video file will be written here...
    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
    // the final video file will be located somewhere like here:
    // /Users/caferrara/Library/Application Support/iPhone Simulator/6.0/Applications/D4B12FEE-E09C-4B12-B772-7F1BD6011BE1/Documents/outputFile.mov
}
- (void)saveVideoToAlbum:(NSString *)path {
    NSLog(@"saveVideoToAlbum");
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)) {
        UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
}
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error)
        NSLog(@"error: %@", error);
    else
        NSLog(@" OK");
}
////////////////////////
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image {
    CGSize size = CGSizeMake(400, 200);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    // kCGImageAlphaNoneSkipFirst);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
That's simply too much code to check for errors. Make sure you can start the export session, that you really get pixel buffers for your images, that the writer is ready to receive more data, that the buffer gets appended without errors, that the export session finishes with success, and that the output movie file exists and actually contains some data. Only then should you try to save it to the system photo album. Check all the available error information along the way so that you know where the thing breaks first. (Another thing: you are taking code from the web and pasting it together, which is not going to work for AV programming.)
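For illustration, a sketch of the kind of checks meant above, reusing the names from the question's export code:
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status != AVAssetExportSessionStatusCompleted) {
        NSLog(@"export failed: %@", exportSession.error);
        return;
    }
    // verify the output file actually exists and contains data
    NSError *fsError = nil;
    NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:exportPath error:&fsError];
    NSLog(@"output file size: %@ (error: %@)", [attrs objectForKey:NSFileSize], fsError);
}];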
Yes, I had the same error:
Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo=0x193ce0 {NSLocalizedRecoverySuggestion=Try saving again., NSUnderlyingError=0x179e40 "The operation couldn’t be completed. (OSStatus error -12412.)", NSLocalizedDescription=Cannot Save}
But only on the simulator; when I ran on a device, the save to the photo library worked just fine.
Use the code below:
- (void)creatingVideo {
    // get full path of video file from documents directory
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mov"];
    // get rid of the existing file if it exists...
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file; it does not exist at path");
    // size of the video frame
    CGSize imageSize = CGSizeMake(640, 480);
    // CGSize imageSize = CGSizeMake(1280, 720);
    // frames per second
    NSUInteger fps = 30;
    NSLog(@"Start building video from defined frames.");
    // AVAssetWriter to create the video from images
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey, [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];
    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;
    // frame count
    int frameCount = 0;
    double frameDuration;
    double numberOfSecondsPerFrame = appDelegate.delaySecond;
    NSLog(@"**************************video creation started********************************");
    for (int i = 0; i < [self.arrImageDataDict count]; i++) {
        @autoreleasepool {
            UIImage *img1 = nil;
            img1 = [self getImageForVideoCreation:i];
            buffer = [self pixelBufferFromCGImage:[img1 CGImage]];
            if (buffer == NULL) {
                NSLog(@"Pixel buffer not created");
            } else {
                BOOL append_ok = NO;
                int j = 0;
                while (!append_ok && j < 20) {
                    if (adaptor.assetWriterInput.readyForMoreMediaData) {
                        // print out status:
                        NSLog(@"Processing video frame (%d,%d) delay %f", frameCount, (int)[self.arrImageDataDict count], numberOfSecondsPerFrame);
                        frameDuration = fps * numberOfSecondsPerFrame;
                        CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                        append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                        if (!append_ok) {
                            NSError *error = videoWriter.error;
                            if (error != nil) {
                                NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                            }
                        }
                    } else {
                        printf("adaptor not ready %d, %d\n", frameCount, j);
                        [NSThread sleepForTimeInterval:0.1];
                    }
                    j++;
                }
                if (!append_ok) {
                    printf("error appending image %d times %d\n, with error.", frameCount, j);
                }
                frameCount++;
                CVPixelBufferRelease(buffer);
                buffer = nil;
            }
        }
    }
    // Finish the session:
    [videoWriterInput markAsFinished];
    // get the iOS version of the device
    float version = [[[UIDevice currentDevice] systemVersion] floatValue];
    if (version < 6.0) {
        [videoWriter finishWriting];
        // NSLog(@"finished writing iOS version:%f", version);
    } else {
        [videoWriter finishWritingWithCompletionHandler:^() {
            // NSLog(@"finished writing iOS version:%f", version);
        }];
    }
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [videoWriterInput release];
    // OK now add an audio file to the movie file
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // Get the saved audio song path to merge it into the video
    NSURL *audio_inputFileUrl;
    NSString *filePath = [self applicationDocumentsDirectory];
    NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"mySong.m4a"];
    audio_inputFileUrl = [[NSURL alloc] initFileURLWithPath:outputFilePath1];
    // this is the video file that was just written above
    NSURL *video_inputFileUrl = [[NSURL alloc] initFileURLWithPath:videoOutputPath];
    [NSThread sleepForTimeInterval:2.0];
    // create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"Slideshow_video.mov"];
    NSURL *outputFileUrl = [[NSURL alloc] initFileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    // AVURLAsset gets the video without audio
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    [videoAsset release];
    [NSThread sleepForTimeInterval:3.0];
    // If an audio song is merged
    if (![self.appDelegate.musicFilePath isEqualToString:@"Not set"]) {
        // *************************make sure all exceptions are off***********************
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        if (![audioAsset tracksWithMediaType:AVMediaTypeAudio].count == 0) {
            [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
        [audioAsset release];
    }
    // Cleanup, in both success and fail cases
    [audio_inputFileUrl release];
    [video_inputFileUrl release];
    [NSThread sleepForTimeInterval:0.1];
    // AVAssetExportSession to export the video
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (_assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
#if !TARGET_IPHONE_SIMULATOR
                [self writeVideoToPhotoLibrary:outputFileUrl];
#endif
                [self RemoveSlideshowImagesInTemp];
                [self removeAudioFileFromDocumentsdirectory:outputFilePath1];
                [self removeAudioFileFromDocumentsdirectory:videoOutputPath];
                [outputFileUrl release];
                [_assetExport release];
                // NSLog(@"AVAssetExportSessionStatusCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (alrtCreatingVideo && alrtCreatingVideo.visible) {
                        [alrtCreatingVideo dismissWithClickedButtonIndex:alrtCreatingVideo.firstOtherButtonIndex animated:YES];
                        [databaseObj isVideoCreated:appDelegate.pro_id];
                        [self performSelector:@selector(successAlertView) withObject:nil afterDelay:0.0];
                    }
                });
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed:%@", _assetExport.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled:%@", _assetExport.error);
                break;
            default:
                break;
        }
    }];
}
// writeVideoToPhotoLibrary
- (void)writeVideoToPhotoLibrary:(NSURL *)url
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Video could not be saved");
        }
    }];
    [library release];
}
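As a small refinement, the completion block also hands back the saved asset's URL, which is handy for confirming the write succeeded; a sketch using the same ALAssetsLibrary API as above:
[library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error) {
        NSLog(@"Video could not be saved: %@", error);
    } else {
        NSLog(@"Video saved to %@", assetURL);
    }
}];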
