I have to make a video editor with a blur effect on video.
Can someone please point me to some useful links or the way this task should be approached? I have tried overlapping videos, but it doesn't bring the videos exactly to the center.
- (void)overlapVideos {
    AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BearVideo" ofType:@"mp4"]] options:nil];
    AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BearVideo" ofType:@"mp4"]] options:nil];

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

    AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
    CGAffineTransform Scale = CGAffineTransformMakeScale(0.6f, 0.6f);
    CGAffineTransform Move = CGAffineTransformMakeTranslation(140, 20);
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale, Move) atTime:kCMTimeZero];

    AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
    CGAffineTransform SecondScale = CGAffineTransformMakeScale(0.9f, 0.9f);
    CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0, 0);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale, SecondMove) atTime:kCMTimeZero];

    instruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = CGSizeMake(1280, 720);

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideo.mov"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    }
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    [exporter setVideoComposition:videoComposition];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
}
- (void)exportDidFinish:(AVAssetExportSession *)session
{
    NSURL *outputURL = session.outputURL;
    if (self.videodelegateObj != nil) {
        [_videodelegateObj videoOverlappingFinished:outputURL];
    }
}
-(void)applyBlurOnAsset:(AVAsset *)asset Completion:(void (^)(BOOL success, NSError *error, NSURL *videoUrl))completion {
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    AVVideoComposition *composition = [AVVideoComposition videoCompositionWithAsset:asset
                                                       applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request) {
        // Clamp to avoid blurring transparent pixels at the image edges
        CIImage *source = [request.sourceImage imageByClampingToExtent];
        [filter setValue:source forKey:kCIInputImageKey];
        [filter setValue:[NSNumber numberWithDouble:10.0] forKey:kCIInputRadiusKey];
        CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent];
        [request finishWithImage:output context:nil];
    }];

    // use fileURLWithPath: for a local file path, not initWithString:
    NSURL *outputUrl = [NSURL fileURLWithPath:@"Your Output path"];
    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset960x540];
    exporter.videoComposition = composition;
    exporter.outputFileType = AVFileTypeMPEG4;
    if (outputUrl) {
        exporter.outputURL = outputUrl;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, [exporter error], nil);
                        });
                        return;
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"crop Export canceled");
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, nil, nil);
                        });
                        return;
                    }
                    break;
                default:
                    break;
            }
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES, nil, outputUrl);
                });
            }
        }];
    }
}
Kindly give some guidance. Any help in this direction would be highly appreciated. Thanks in advance.
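For the centering problem specifically, one option is to compute the translation from the track's natural size and the render size instead of hard-coding offsets like (140, 20). A minimal sketch, assuming the 1280x720 render size used above and ignoring the track's preferredTransform (rotated footage would need that factored in):

AVAssetTrack *videoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize naturalSize = videoTrack.naturalSize;
CGFloat scaleFactor = 0.6f;
CGSize renderSize = CGSizeMake(1280, 720);
// Offset that centers the scaled track inside the render area
CGFloat tx = (renderSize.width - naturalSize.width * scaleFactor) / 2.0f;
CGFloat ty = (renderSize.height - naturalSize.height * scaleFactor) / 2.0f;
CGAffineTransform scale = CGAffineTransformMakeScale(scaleFactor, scaleFactor);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(scale, CGAffineTransformMakeTranslation(tx, ty)) atTime:kCMTimeZero];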
I'm trying to update my existing download model, so I have replaced my old code:
AFHTTPRequestOperation *downloadRequest = [[AFHTTPRequestOperation alloc] initWithRequest:request];
[downloadRequest setCompletionBlockWithSuccess:^(AFHTTPRequestOperation *operation, id responseObject) {
    NSData *data = [[NSData alloc] initWithData:responseObject];
    [data writeToFile:video2Save.localFilePath atomically:YES];
    video2Save.downloadComplete = YES;
    [YEPersistentModelHelper saveData:_downloadVideos ToDiskWithIdentifier:persistentIdDownloadedVideos];
    NSLog(@"file downloading complete : %@", video2Save.localFilePath);
} failure:^(AFHTTPRequestOperation *operation, NSError *error) {
    NSLog(@"file downloading error : %@", [error localizedDescription]);
}];
[downloadRequest start];
with the following:
NSURLSessionDownloadTask *downloadTask = [_sessionManager downloadTaskWithRequest:request progress:&progress destination:^NSURL *(NSURL *targetPath, NSURLResponse *response) {
    NSURL *documentsDirectoryURL = [[NSFileManager defaultManager] URLForDirectory:NSDocumentDirectory inDomain:NSUserDomainMask appropriateForURL:nil create:NO error:nil];
    return [documentsDirectoryURL URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", video2Save.videoVersionId]];
} completionHandler:^(NSURLResponse *response, NSURL *filePath, NSError *error) {
    NSLog(@"File downloaded to: %@", filePath);
    video2Save.localFilePath = [[filePath filePathURL] absoluteString];
    video2Save.downloadComplete = YES;
    [YEPersistentModelHelper saveData:_downloadVideos ToDiskWithIdentifier:persistentIdDownloadedVideos];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *err = nil;
    NSDictionary *att = [fileManager attributesOfItemAtPath:video2Save.localFilePath error:&err];
    NSLog(@"NSDictionary: %@", att);
}];
[downloadTask resume];
And it seems to work fine: the completion block is executed and the file exists at the logged target path.
The problem is that I am no longer able to play the video! I use MPMoviePlayerController, which throws this useful error:
_itemFailedToPlayToEnd: { kind = 1; new = 2; old = 0; }
The only difference seems to be the file permissions. The first approach adds a "staff" group and everyone is allowed to read, while the second only grants access to "me". But even if I change the permissions in the Finder, I am not able to play it...
Does anyone have an idea!?
To save the file location, use path, not absoluteString:

video2Save.localFilePath = [[filePath filePathURL] path];

Don't call absoluteString even to play; just use the path.
Like this, for example, to load the video:

NSURL *FilePathURL = [NSURL fileURLWithPath:[docDir stringByAppendingPathComponent:fileToCheck]];
// then hand [FilePathURL path] to your video player class
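A minimal sketch of both sides, assuming video2Save.localFilePath now stores a plain filesystem path:

// Saving: store the plain path, not the file:// URL string
video2Save.localFilePath = [filePath path];

// Playing: rebuild a file URL from the stored path
NSURL *playURL = [NSURL fileURLWithPath:video2Save.localFilePath];
MPMoviePlayerController *player = [[MPMoviePlayerController alloc] initWithContentURL:playURL];
// add player.view to your view hierarchy before calling play
[player play];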
I want to ask a question about Core Location and Core Data. I looked at some existing questions but couldn't solve this.
I have an application which stores text fields, photos, and date and time data in a UITableView with Core Data. I've stored everything (photos, texts, date etc.), but now I'm trying to store the location data and can't.
This is some of my code:
#pragma mark - View lifecycle

- (void)viewDidLoad
{
    [super viewDidLoad];

    locationManager = [[CLLocationManager alloc] init];
    locationManager.delegate = self;
    locationManager.desiredAccuracy = kCLLocationAccuracyBest;
    [locationManager startUpdatingLocation];

    NSDateFormatter *myFormatter = [[NSDateFormatter alloc] init];
    [myFormatter setDateFormat:@"MM-dd-yyyy HH:mm"];
    [myFormatter setTimeZone:[NSTimeZone systemTimeZone]];
    todaysDate = [myFormatter stringFromDate:[NSDate date]];
    myDateLabel.text = todaysDate;

    UIView *patternBg = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 320, 480)];
    patternBg.backgroundColor = [UIColor colorWithPatternImage:[UIImage imageNamed:@"background01.png"]];
    self.tableView.backgroundView = patternBg;

    // If we are editing an existing picture, then put the details from Core Data into the text fields for displaying
    if (currentPicture)
    {
        [companyNameField setText:[currentPicture companyName]];
        [myDateLabel setText:[currentPicture currentDate]];
        if ([currentPicture photo])
            [imageField setImage:[UIImage imageWithData:[currentPicture photo]]];
    }
}
In the save button:
- (IBAction)editSaveButtonPressed:(id)sender
{
    // For both new and existing pictures, fill in the details from the form
    [self.currentPicture setCompanyName:[companyNameField text]];
    [self.currentPicture setCurrentDate:[myDateLabel text]];
    [self.currentPicture setCurrentTime:[myTimeLabel text]];
    [self.currentPicture setLatitudeData:[_latitudeLabel text]];
    [self.currentPicture setLongtidueData:[_longtitudeLabel text]];
}
And lastly, my location manager's delegate method:
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation
{
    NSLog(@"didUpdateToLocation: %@", newLocation);
    CLLocation *currentLocation = newLocation;
    if (currentLocation != nil) {
        _longtitudeLabel.text = [NSString stringWithFormat:@"%.8f", currentLocation.coordinate.longitude];
        _latitudeLabel.text = [NSString stringWithFormat:@"%.8f", currentLocation.coordinate.latitude];
        [self->locationManager stopUpdatingLocation];
    }
}
I tried [locationManager stopUpdatingLocation] many times, but whenever I enter the app the code starts calculating latitude and longitude again. I just want to take that data once and store it.
Thanks!
If calling stopUpdatingLocation doesn't stop location updates, then most likely self->locationManager is nil. That would mean you're not really making the call.
It's hard to be sure exactly why this happens, except that your code seems to make a point of not using the semantics implied by a @property declaration. Assigning to locationManager directly in viewDidLoad bypasses any property accessor, and looking up the manager via self->locationManager does as well. Assuming locationManager is a property, you should assign it via self.locationManager, and use that accessor when reading it back as well.
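A minimal sketch of the property-based pattern, assuming the manager lives on the view controller (MyViewController is a placeholder name):

@interface MyViewController () <CLLocationManagerDelegate>
@property (strong, nonatomic) CLLocationManager *locationManager;
@end

@implementation MyViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.locationManager = [[CLLocationManager alloc] init]; // goes through the property setter
    self.locationManager.delegate = self;
    self.locationManager.desiredAccuracy = kCLLocationAccuracyBest;
    [self.locationManager startUpdatingLocation];
}

- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation {
    // same instance, read back through the same accessor
    [self.locationManager stopUpdatingLocation];
}

@end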
A couple of things:
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation
{
    NSTimeInterval locationAge = -[newLocation.timestamp timeIntervalSinceNow];
    if (locationAge > 5) return; // ignore cached location, we want current loc
    if (newLocation.horizontalAccuracy <= 0) return; // ignore invalid
    // wait for GPS accuracy (will be < 400)
    if (newLocation.horizontalAccuracy < 400) {
        _longtitudeLabel.text = [NSString stringWithFormat:@"%.8f", newLocation.coordinate.longitude];
        _latitudeLabel.text = [NSString stringWithFormat:@"%.8f", newLocation.coordinate.latitude];
        [manager stopUpdatingLocation];
    }
}
In your didUpdateToLocation, use this code:
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation {
    NSTimeInterval locationAge = -[newLocation.timestamp timeIntervalSinceNow];
    if (locationAge > 5) return; // ignore cached location, we want current loc
    if (newLocation.horizontalAccuracy <= 0) return; // ignore invalid
    // wait for GPS accuracy (will be < 400)
    if (newLocation.horizontalAccuracy < 400) {
        _longtitudeLabel.text = [NSString stringWithFormat:@"%.8f", newLocation.coordinate.longitude];
        _latitudeLabel.text = [NSString stringWithFormat:@"%.8f", newLocation.coordinate.latitude];
        [manager stopUpdatingLocation];
        // assign nil to the locationManager object and its delegate
        locationManager.delegate = nil;
        locationManager = nil;
    }
}
Thanks.
Problem in saving video to the iPhone library.
I have an array of UIImages, and two buttons, "convertToVideo" & "saveToiPhoneLib".
-(IBAction) convertToVideo
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *savedVideoPath = [documentsDirectory stringByAppendingPathComponent:@"videoOutput"];
    printf(" \n\n\n-Video file == %s--\n\n\n", [savedVideoPath UTF8String]);
    [self writeImageAsMovie:imageArray toPath:savedVideoPath size:self.view.frame.size duration:3];
}
Here I'm passing the imageArray and savedVideoPath to the function below:
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString *)path size:(CGSize)size duration:(int)duration
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];

    // NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    //convert uiimage to CGImage.
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    //Write samples:
    ......

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}
Generating a CVPixelBufferRef here:
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, self.view.frame.size.width,
                                          self.view.frame.size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, self.view.frame.size.width,
                                                 self.view.frame.size.height, 8, 4 * self.view.frame.size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
Saving to the iPhone library:
-(IBAction) saveToiPhoneLib
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *getImagePath = [basePath stringByAppendingPathComponent:@"videoOutput"];
    printf(" \n\n\n-Video file == %s--\n\n\n", [getImagePath UTF8String]);
    UISaveVideoAtPathToSavedPhotosAlbum(getImagePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}

- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    NSLog(@"Finished saving video with error: %@", error);
}
But while saving I'm getting this error message:
Finished saving video with error: Error Domain=ALAssetsLibraryErrorDomain Code=-3302 "Invalid data" UserInfo=0x1d59f0 {NSLocalizedFailureReason=There was a problem writing this asset because the data is invalid and cannot be viewed or played., NSLocalizedRecoverySuggestion=Try with different data, NSLocalizedDescription=Invalid data}
Please let me know my mistake. Thanks in advance.
-(void)convertimagetoVideo
{
    ///////////// setup OR function def if we move this to a separate function ////////////
    // this should be moved to its own function, that can take an imageArray, videoOutputPath, etc...
    NSError *error = nil;

    // set up file manager, and file videoOutputPath, remove "test_output.mp4" if it exists...
    //NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4";
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    //NSLog(@"-->videoOutputPath= %@", videoOutputPath);

    // get rid of existing mp4 if exists...
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);

    CGSize imageSize = CGSizeMake(400, 200);
    NSUInteger fps = 30;

    //NSMutableArray *imageArray;
    //imageArray = [[NSMutableArray alloc] initWithObjects:@"download.jpeg", @"download2.jpeg", nil];
    NSMutableArray *imageArray;
    NSArray *imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
    imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
    for (NSString *path in imagePaths)
    {
        [imageArray addObject:[UIImage imageWithContentsOfFile:path]];
        //NSLog(@"-->image path= %@", path);
    }
    // log the count after the array is filled; logging right after initWithCapacity: would always print 0
    NSLog(@"-->imageArray.count= %lu", (unsigned long)imageArray.count);
    ////////////// end setup ///////////////////////////////////
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img in imageArray)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%d)",frameCount,[imageArray count]);
//CMTime frameTime = CMTimeMake((int64_t), (int32_t)2);
CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
NSLog(#"seconds = %f, %u, %d", CMTimeGetSeconds(frameTime),fps,j);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
}
-(void)CompileFilestomakeVideo
{
    // set up file manager, and file videoOutputPath, remove "test_output.mp4" if it exists...
    //NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4";
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    //NSLog(@"-->videoOutputPath= %@", videoOutputPath);

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    // audio input file...
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    // this is the video file that was just written above, full path to file is in --> videoOutputPath
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    // create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    //_assetExport.outputFileType = @"public.mpeg-4";
    //NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        [self saveVideoToAlbum:outputFilePath];
    }];

    ///// THAT IS IT DONE... the final video file will be written here...
    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
    // the final video file will be located somewhere like here:
    // /Users/caferrara/Library/Application Support/iPhone Simulator/6.0/Applications/D4B12FEE-E09C-4B12-B772-7F1BD6011BE1/Documents/outputFile.mov
}
- (void)saveVideoToAlbum:(NSString *)path {
    NSLog(@"saveVideoToAlbum");
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)) {
        UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
}

-(void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error)
        NSLog(@"error: %@", error);
    else
        NSLog(@" OK");
}
////////////////////////
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
    CGSize size = CGSizeMake(400, 200);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    //kCGImageAlphaNoneSkipFirst);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
That’s simply too much code to check for errors. Make sure you can start the export session, that you really get pixel buffers for your images, that the writer is ready to receive more data, that the buffer gets appended without errors, that the export session finishes with success, and that the output movie file exists and actually contains some data. Only then can you try to save it to the system photo album. Check all the available error information along the way so that you know where the thing breaks for the first time. (Another thing: you are simply taking code from the web and pasting it together, which is not going to work for AV programming.)
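For example, a minimal sketch of the kind of checks to add at each stage, using the variable names from the code above:

// after creating the writer
if (!videoWriter) { NSLog(@"writer init failed: %@", error); return; }

// after each append attempt
if (![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime]) {
    NSLog(@"append failed at frame %d: %@", frameCount, videoWriter.error);
}

// after finishing, before trying to save to the album
if (videoWriter.status == AVAssetWriterStatusFailed) {
    NSLog(@"writing failed: %@", videoWriter.error);
    return;
}
NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:videoOutputPath error:nil];
NSLog(@"output file size: %@", [attrs objectForKey:NSFileSize]);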
Yes, I had the same error:
Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo=0x193ce0 {NSLocalizedRecoverySuggestion=Try saving again., NSUnderlyingError=0x179e40 "The operation couldn’t be completed. (OSStatus error -12412.)", NSLocalizedDescription=Cannot Save}
But only on the simulator; when I ran it on a device, saving to the photo library worked just fine.
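As a quick guard, it is also worth checking compatibility before calling the save function; a sketch using the getImagePath variable from the saveToiPhoneLib method above:

if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(getImagePath)) {
    UISaveVideoAtPathToSavedPhotosAlbum(getImagePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
} else {
    NSLog(@"Video at %@ is not compatible with the Saved Photos album", getImagePath);
}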
Use the code below:
- (void)creatingVideo {
    //get full path of video file from documents directory
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mov"];

    // get rid of existing movie if it exists...
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file, it does not exist at path");

    //size of the video frame
    CGSize imageSize = CGSizeMake(640, 480);
    //CGSize imageSize = CGSizeMake(1280, 720);

    //frames per second
    NSUInteger fps = 30;

    NSLog(@"Start building video from defined frames.");

    //AVAssetWriter to create the video from images
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey, [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    //frameCount
    int frameCount = 0;
    double frameDuration;
    double numberOfSecondsPerFrame = appDelegate.delaySecond;
    NSLog(@"**************************video creation started********************************");
    for (int i = 0; i < [self.arrImageDataDict count]; i++) {
        @autoreleasepool {
            UIImage *img1 = nil;
            img1 = [self getImageForVideoCreation:i];
            buffer = [self pixelBufferFromCGImage:[img1 CGImage]];
            if (buffer == NULL) {
                NSLog(@"Pixel buffer not created");
            } else {
                BOOL append_ok = NO;
                int j = 0;
                while (!append_ok && j < 20) {
                    if (adaptor.assetWriterInput.readyForMoreMediaData) {
                        //print out status:
                        NSLog(@"Processing video frame (%d,%lu) delay %f", frameCount, (unsigned long)[self.arrImageDataDict count], numberOfSecondsPerFrame);
                        frameDuration = fps * numberOfSecondsPerFrame;
                        CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                        append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                        if (!append_ok) {
                            NSError *error = videoWriter.error;
                            if (error != nil) {
                                NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                            }
                        }
                    } else {
                        printf("adaptor not ready %d, %d\n", frameCount, j);
                        [NSThread sleepForTimeInterval:0.1];
                    }
                    j++;
                }
                if (!append_ok) {
                    printf("error appending image %d times %d\n, with error.", frameCount, j);
                }
                frameCount++;
                CVPixelBufferRelease(buffer);
                buffer = nil;
            }
        }
    }
    //Finish the session:
    [videoWriterInput markAsFinished];

    //get the iOS version of the device
    float version = [[[UIDevice currentDevice] systemVersion] floatValue];
    if (version < 6.0)
    {
        [videoWriter finishWriting];
        //NSLog (@"finished writing iOS version:%f",version);
    } else {
        [videoWriter finishWritingWithCompletionHandler:^(){
            //NSLog (@"finished writing iOS version:%f",version);
        }];
    }

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [videoWriterInput release];
    //OK, now add an audio file to the movie file
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    //Get the saved audio song path to merge it into the video
    NSURL *audio_inputFileUrl;
    NSString *filePath = [self applicationDocumentsDirectory];
    NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"mySong.m4a"];
    audio_inputFileUrl = [[NSURL alloc] initFileURLWithPath:outputFilePath1];

    // this is the video file that was just written above
    NSURL *video_inputFileUrl = [[NSURL alloc] initFileURLWithPath:videoOutputPath];
    [NSThread sleepForTimeInterval:2.0];

    // create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"Slideshow_video.mov"];
    NSURL *outputFileUrl = [[NSURL alloc] initFileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    //AVURLAsset gets the video without audio
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    [videoAsset release];
    [NSThread sleepForTimeInterval:3.0];

    //If an audio song is merged
    if (![self.appDelegate.musicFilePath isEqualToString:@"Not set"])
    {
        //*************************make sure all exceptions are off***********************
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        if ([audioAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
            [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
        [audioAsset release];
    }

    // Cleanup, in both success and fail cases
    [audio_inputFileUrl release];
    [video_inputFileUrl release];
    [NSThread sleepForTimeInterval:0.1];
    //AVAssetExportSession to export the video
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (_assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
#if !TARGET_IPHONE_SIMULATOR
                [self writeVideoToPhotoLibrary:outputFileUrl];
#endif
                [self RemoveSlideshowImagesInTemp];
                [self removeAudioFileFromDocumentsdirectory:outputFilePath1];
                [self removeAudioFileFromDocumentsdirectory:videoOutputPath];
                [outputFileUrl release];
                [_assetExport release];
                //NSLog(@"AVAssetExportSessionStatusCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (alrtCreatingVideo && alrtCreatingVideo.visible) {
                        [alrtCreatingVideo dismissWithClickedButtonIndex:alrtCreatingVideo.firstOtherButtonIndex animated:YES];
                        [databaseObj isVideoCreated:appDelegate.pro_id];
                        [self performSelector:@selector(successAlertView) withObject:nil afterDelay:0.0];
                    }
                });
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed:%@", _assetExport.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled:%@", _assetExport.error);
                break;
            default:
                break;
        }
    }];
}
//writeVideoToPhotoLibrary
- (void)writeVideoToPhotoLibrary:(NSURL *)url
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Video could not be saved");
        }
    }];
    [library release];
}
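One caveat with the code above: finishWritingWithCompletionHandler: is asynchronous, so falling straight through into the audio-merge step can race with the writer still finishing the file (the sleepForTimeInterval: calls are papering over this). A minimal sketch of deferring the follow-up work into the handler instead; mergeAudioIntoVideo is a hypothetical helper wrapping the AVMutableComposition step above:

[videoWriter finishWritingWithCompletionHandler:^{
    // The movie file is only guaranteed complete once this handler runs
    dispatch_async(dispatch_get_main_queue(), ^{
        [self mergeAudioIntoVideo]; // hypothetical helper: the composition/export code above
    });
}];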
Good morning all,
I have a big problem with the following code and no solution, so I hope someone can help me:
- (IBAction)goToChart {
    [rootViewController switchViews];
}

// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
    [super viewDidLoad];

    UIButton *weiter = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    weiter.frame = CGRectMake(100, 400, 120, 40);
    [weiter addTarget:self action:@selector(goToChart) forControlEvents:UIControlEventTouchUpInside];
    NSString *ansicht = @"Weiter";
    [weiter setTitle:ansicht forState:UIControlStateNormal];
    [self.view addSubview:weiter];

    // loading images into the queue
    loadImagesOperationQueue = [[NSOperationQueue alloc] init];
    NSString *imageName;
    for (int i = 0; i < 10; i++) {
        imageName = [[NSString alloc] initWithFormat:@"cover_%d.jpg", i];
        imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:imageName]];
        UIImage *aktuellesImage = imageView.image;
        UIImage *scaledImage = [aktuellesImage scaleToSize:CGSizeMake(100.0f, 100.0f)];
        [(AFOpenFlowView *)self.view setImage:scaledImage forIndex:i];
        [imageName release];
        NSLog(@"%d is the index", i);
    }
    [(AFOpenFlowView *)self.view setNumberOfImages:10];
}
So you can see there are 10 images in this CoverFlowView, but how can I find out the ACTUAL picture that is in front, to use it in another view?
Could someone help me, please?
Greetings, Marco
-(void)openFlowView:(AFOpenFlowView *)openFlowView imageSelected:(int)index
{
    AppDelegate_iPhone *appDelegate = (AppDelegate_iPhone *)[[UIApplication sharedApplication] delegate];
    db_detail = (DB_data *)[self.GalleryArray objectAtIndex:index];
    appDelegate.Id = db_detail.Id;
    // NSLog(@"id: value is %d", db_detail.Id);
    // NSLog(@"Name value is: %@", db_detail.Name);
    appDelegate.title = db_detail.Name;
    DetailMovieViewController *ViewController = [[DetailMovieViewController alloc] init];
    [self.navigationController pushViewController:ViewController animated:YES];
    [ViewController release];
    // note: don't release db_detail here; it came from objectAtIndex: and was never retained
}