2012-04-02 · 58 views

What I want is to change the container of the .mov video files I pick with UIImagePickerController into an .mp4 container, compressing them with AVAssetExportSession using AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES. How can I perform the fastest possible .mov to .mp4 conversion programmatically in an iPhone/iPad app?

I need sample code that performs the fastest possible trans-wrap (container change without re-encoding) programmatically in an iPhone/iPad app.

I tried setting the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
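(For reference, a minimal sketch of checking the supported types before assigning outputFileType, which avoids the exception; the URL and variable names here are illustrative, not from my project:)

// 'movURL' stands in for the URL of the picked .mov file (illustrative). 
NSURL *movURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mov"]]; 
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:movURL options:nil]; 
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset 
                              presetName:AVAssetExportPresetMediumQuality]; 
// Assigning an outputFileType that is not in supportedFileTypes raises an exception, 
// so check before setting it. 
if ([session.supportedFileTypes containsObject:AVFileTypeMPEG4]) 
    session.outputFileType = AVFileTypeMPEG4; 
else 
    NSLog(@"MP4 not supported here; supported types: %@", session.supportedFileTypes); 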

I tried doing the conversion with AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I did get an .mp4 output file, but it was not a trans-wrap: the samples are decompressed and re-encoded, i.e. a full transcode, so the output file was 3x larger than the source and converting a 60-second video took 128 seconds.

I need a solution that runs quickly and keeps the file size close to the original.

Here is the code I am using for the .mov to .mp4 conversion:

I set the assetWriter options in the setUpReaderAndWriterReturningError method.

#import "MCVideoConverter.h" 


#import <AVFoundation/AVAsset.h> 
#import <AVFoundation/AVAssetTrack.h> 
#import <AVFoundation/AVAssetReader.h> 
#import <AVFoundation/AVAssetReaderOutput.h> 
#import <AVFoundation/AVAssetWriter.h> 
#import <AVFoundation/AVAssetWriterInput.h> 
#import <AVFoundation/AVMediaFormat.h> 
#import <AVFoundation/AVAudioSettings.h> 
#import <AVFoundation/AVVideoSettings.h> 
#import <AVFoundation/AVAssetImageGenerator.h> 
#import <AVFoundation/AVTime.h> 
#import <CoreMedia/CMSampleBuffer.h> 


@protocol RWSampleBufferChannelDelegate; 

@interface RWSampleBufferChannel : NSObject 
{ 
@private 
    AVAssetReaderOutput  *assetReaderOutput; 
    AVAssetWriterInput  *assetWriterInput; 

    dispatch_block_t  completionHandler; 
    dispatch_queue_t  serializationQueue; 
    BOOL     finished; // only accessed on serialization queue 
} 

- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput; 
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil. 
- (void)cancel; 

@property (nonatomic, readonly) NSString *mediaType; 

@end 


@protocol RWSampleBufferChannelDelegate <NSObject> 
@required 
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer; 
@end 


@interface MCVideoConverter() <RWSampleBufferChannelDelegate> 

// These three methods are always called on the serialization dispatch queue 
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this 
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError; 
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error; 

@end 


@implementation MCVideoConverter 

+ (NSArray *)readableTypes 
{ 
    return [AVURLAsset audiovisualTypes]; 
} 

+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName 
{ 
    return YES; 
} 

- (id)init 
{ 
    self = [super init]; 

    if (self) 
    { 
     NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self]; 
     serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL); 
    } 

    return self; 
} 

- (void)dealloc 
{ 
    [asset  release]; 
    [outputURL release]; 

    [assetReader release]; 
    [assetWriter release]; 
    [audioSampleBufferChannel release]; 
    [videoSampleBufferChannel release]; 
    if (serializationQueue) 
     dispatch_release(serializationQueue); 

    [super dealloc]; 
} 

@synthesize asset=asset; 
@synthesize timeRange=timeRange; 
@synthesize writingSamples=writingSamples; 
@synthesize outputURL=outputURL; 
@synthesize progressView; 

- (void)convertVideo:(NSURL *)inputURL outputURL:(NSURL *)_outputURL progress:(UIProgressView *)_progressView 
{ 
    self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil]; 
    self.progressView = _progressView; 

    cancelled = NO;  

    [self performSelector:@selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // defer to the next run loop pass so we don't start while a previous operation is still finishing 
} 


- (void)startProgressSheetWithURL:(NSURL *)localOutputURL 
{ 
    [self setOutputURL:localOutputURL]; 
    [self setWritingSamples:YES]; 

    AVAsset *localAsset = [self asset]; 
    [localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"tracks", @"duration", nil] completionHandler:^ 
    { 
     // Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation 
     dispatch_async(serializationQueue, ^{ 
      // Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block 
      if (cancelled) 
       return; 

      BOOL success = YES; 
      NSError *localError = nil; 

      success = ([localAsset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded); 
      if (success) 
       success = ([localAsset statusOfValueForKey:@"duration" error:&localError] == AVKeyValueStatusLoaded); 

      if (success) 
      { 
       [self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])]; 

       // AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists 
       NSFileManager *fm = [NSFileManager defaultManager]; 
       NSString *localOutputPath = [localOutputURL path]; 
       if ([fm fileExistsAtPath:localOutputPath]) 
        success = [fm removeItemAtPath:localOutputPath error:&localError]; 
      } 

      // Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error 
      if (success) 
       success = [self setUpReaderAndWriterReturningError:&localError]; 
      if (success) 
       success = [self startReadingAndWritingReturningError:&localError]; 
      if (!success) 
       [self readingAndWritingDidFinishSuccessfully:success withError:localError]; 
     }); 
    }]; 
} 

- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError 
{ 
    BOOL success = YES; 
    NSError *localError = nil; 
    AVAsset *localAsset = [self asset]; 
    NSURL *localOutputURL = [self outputURL]; 

    // Create asset reader and asset writer 
    assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError]; 
    success = (assetReader != nil); 
    if (success) 
    { 
     //changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError]; 
     assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError]; 

     success = (assetWriter != nil); 
    } 

    // Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset 
    if (success) 
    { 
     AVAssetTrack *audioTrack = nil, *videoTrack = nil; 

     // Grab first audio track and first video track, if the asset has them 
     NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio]; 
     if ([audioTracks count] > 0) 
      audioTrack = [audioTracks objectAtIndex:0]; 
     NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo]; 
     if ([videoTracks count] > 0) 
      videoTrack = [videoTracks objectAtIndex:0]; 

     if (audioTrack) 
     { 
      // Decompress to Linear PCM with the asset reader 
      NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
                 [NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey, 
                 nil]; 
      AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings]; 
      [assetReader addOutput:output]; 

      AudioChannelLayout stereoChannelLayout = { 
       .mChannelLayoutTag = kAudioChannelLayoutTag_Stereo, 
       .mChannelBitmap = 0, 
       .mNumberChannelDescriptions = 0 
      }; 
      NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)]; 

      // Compress to 128kbps AAC with the asset writer 
      NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
                 [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey, 
                 [NSNumber numberWithInteger:128000], AVEncoderBitRateKey, 
                 [NSNumber numberWithInteger:44100], AVSampleRateKey, 
                 channelLayoutAsData, AVChannelLayoutKey, 
                 [NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey, 
                 nil]; 
      AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings]; 
      [assetWriter addInput:input]; 

      // Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers 
      audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input]; 
     } 

     if (videoTrack) 
     { 
      // Decompress to ARGB with the asset reader 
      NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
                 [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey, 
                 [NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey, 
                 nil]; 
      AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings]; 
      [assetReader addOutput:output]; 

      // Get the format description of the track, to fill in attributes of the video stream that we don't want to change 
      CMFormatDescriptionRef formatDescription = NULL; 
      NSArray *formatDescriptions = [videoTrack formatDescriptions]; 
      if ([formatDescriptions count] > 0) 
       formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0]; 

      // Grab track dimensions from format description 
      CGSize trackDimensions = { 
       .width = 0.0, 
       .height = 0.0, 
      }; 
      if (formatDescription) 
       trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false); 
      else 
       trackDimensions = [videoTrack naturalSize]; 


      // Grab clean aperture, pixel aspect ratio from format description 
      // Grab clean aperture and pixel aspect ratio from the format description. 
      // This must be a real mutable dictionary; if it were left nil, the 
      // -setObject:forKey: calls below would be silently sent to nil and the 
      // settings would be dropped. 
      NSMutableDictionary *compressionSettings = [NSMutableDictionary dictionary]; 
      // Optional rate/profile settings could also go here, e.g. 
      // AVVideoProfileLevelH264Baseline30 for AVVideoProfileLevelKey, 
      // [NSNumber numberWithInt:960000] for AVVideoAverageBitRateKey, 
      // [NSNumber numberWithInt:1] for AVVideoMaxKeyFrameIntervalKey. 
      if (formatDescription) 
      { 
       NSDictionary *cleanAperture = nil; 
       NSDictionary *pixelAspectRatio = nil; 
       CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture); 
       if (cleanApertureFromCMFormatDescription) 
       { 
        cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys: 
            CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey, 
            CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey, 
            CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey, 
            CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey, 
            nil]; 
       } 
       CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio); 
       if (pixelAspectRatioFromCMFormatDescription) 
       { 
        pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys: 
             CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey, 
             CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey, 
             nil]; 
       } 

       if (cleanAperture || pixelAspectRatio) 
       { 
        if (cleanAperture) 
         [compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey]; 
        if (pixelAspectRatio) 
         [compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey]; 
       } 
      } 

      // Compress to H.264 with the asset writer 
      NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys: 
                AVVideoCodecH264, AVVideoCodecKey, 
                [NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey, 
                [NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey, 
                nil]; 
      if ([compressionSettings count] > 0) 
       [videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey]; 

      AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings]; 
      [assetWriter addInput:input]; 

      // Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers 
      videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input]; 
     } 
    } 

    if (outError) 
     *outError = localError; 

    return success; 
} 

- (BOOL)startReadingAndWritingReturningError:(NSError **)outError 
{ 
    BOOL success = YES; 
    NSError *localError = nil; 

    // Instruct the asset reader and asset writer to get ready to do work 
    success = [assetReader startReading]; 
    if (!success) 
     localError = [assetReader error]; 
    if (success) 
    { 
     success = [assetWriter startWriting]; 
     if (!success) 
      localError = [assetWriter error]; 
    } 

    if (success) 
    { 
     dispatch_group_t dispatchGroup = dispatch_group_create(); 

     // Start a sample-writing session 
     [assetWriter startSessionAtSourceTime:[self timeRange].start]; 

     // Start reading and writing samples 
     if (audioSampleBufferChannel) 
     { 
      // Only set audio delegate for audio-only assets, else let the video channel drive progress 
      id <RWSampleBufferChannelDelegate> delegate = nil; 
      if (!videoSampleBufferChannel) 
       delegate = self; 

      dispatch_group_enter(dispatchGroup); 
      [audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{ 
       dispatch_group_leave(dispatchGroup); 
      }]; 
     } 
     if (videoSampleBufferChannel) 
     { 
      dispatch_group_enter(dispatchGroup); 
      [videoSampleBufferChannel startWithDelegate:self completionHandler:^{ 
       dispatch_group_leave(dispatchGroup); 
      }]; 
     } 

     // Set up a callback for when the sample writing is finished 
     dispatch_group_notify(dispatchGroup, serializationQueue, ^{ 
      BOOL finalSuccess = YES; 
      NSError *finalError = nil; 

      if (cancelled) 
      { 
       [assetReader cancelReading]; 
       [assetWriter cancelWriting]; 
      } 
      else 
      { 
       if ([assetReader status] == AVAssetReaderStatusFailed) 
       { 
        finalSuccess = NO; 
        finalError = [assetReader error]; 
       } 

       if (finalSuccess) 
       { 
        finalSuccess = [assetWriter finishWriting]; 
        if (!finalSuccess) 
         finalError = [assetWriter error]; 
       } 
      } 

      [self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError]; 
     }); 

     dispatch_release(dispatchGroup); 
    } 

    if (outError) 
     *outError = localError; 

    return success; 
} 

- (void)cancel 
{ 
    self.progressView = nil; 

    // Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown 
    dispatch_async(serializationQueue, ^{ 
     [audioSampleBufferChannel cancel]; 
     [videoSampleBufferChannel cancel]; 
     cancelled = YES; 
    }); 
} 

- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error 
{ 
    NSLog(@"%s[%d] - success = %d error = %@", __FUNCTION__, __LINE__, success, error); 

    if (!success) 
    { 
     [assetReader cancelReading]; 
     [assetWriter cancelWriting]; 
    } 

    // Tear down ivars 
    [assetReader release]; 
    assetReader = nil; 
    [assetWriter release]; 
    assetWriter = nil; 
    [audioSampleBufferChannel release]; 
    audioSampleBufferChannel = nil; 
    [videoSampleBufferChannel release]; 
    videoSampleBufferChannel = nil; 
    cancelled = NO; 

    // Dispatch UI-related tasks to the main queue 
    dispatch_async(dispatch_get_main_queue(), ^{ 

     if (!success) 
     { 

     } 

     [self setWritingSamples:NO]; 
    }); 
} 

static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange) 
{ 
    CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
    progressTime = CMTimeSubtract(progressTime, timeRange.start); 
    CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer); 
    if (CMTIME_IS_NUMERIC(sampleDuration)) 
     progressTime= CMTimeAdd(progressTime, sampleDuration); 
    return CMTimeGetSeconds(progressTime)/CMTimeGetSeconds(timeRange.duration); 
} 

static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex) 
{ 
    CVPixelBufferLockBaseAddress(pixelBuffer, 0); 

    size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer); 
    size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); 
    static const size_t bytesPerPixel = 4; // constant for ARGB pixel format 
    unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer); 

    for (size_t row = 0; row < bufferHeight; ++row) 
    {  
     for (size_t column = 0; column < bufferWidth; ++column) 
     { 
      unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel); 
      pixel[componentIndex] = 0; 
     } 
    } 

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); 
} 

+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag 
{ 
    return (size_t)filterTag; // filter tags are defined to correspond directly to the ARGB component index they modify 
} 

- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer 
{ 
    CVPixelBufferRef pixelBuffer = NULL; 

    // Calculate progress (scale of 0.0 to 1.0) 
    double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]); 
    NSLog(@"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress); 


    // Grab the pixel buffer from the sample buffer, if possible 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID())) 
    { 
     pixelBuffer = (CVPixelBufferRef)imageBuffer; 
     if (filterTag >= 0) // -1 means "no filtering, please"; filterTag is an ivar declared in the class header (not shown here) 
      removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]); 
    } 
} 

@end 


@interface RWSampleBufferChannel() 
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue 
@end 

@implementation RWSampleBufferChannel 

- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput 
{ 
    self = [super init]; 

    if (self) 
    { 
     assetReaderOutput = [localAssetReaderOutput retain]; 
     assetWriterInput = [localAssetWriterInput retain]; 

     finished = NO; 
     NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self]; 
     serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL); 
    } 

    return self; 
} 

- (void)dealloc 
{ 
    [assetReaderOutput release]; 
    [assetWriterInput release]; 
    if (serializationQueue) 
     dispatch_release(serializationQueue); 
    [completionHandler release]; 

    [super dealloc]; 
} 

- (NSString *)mediaType 
{ 
    return [assetReaderOutput mediaType]; 
} 

- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler 
{ 
    completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary 

    [assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{ 
     if (finished) 
      return; 

     BOOL completedOrFailed = NO; 

     // Read samples in a loop as long as the asset writer input is ready 
     while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed) 
     { 
      CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer]; 
      if (sampleBuffer != NULL) 
      { 
       if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)]) 
        [delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer]; 

       BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer]; 
       CFRelease(sampleBuffer); 
       sampleBuffer = NULL; 

       completedOrFailed = !success; 
      } 
      else 
      { 
       completedOrFailed = YES; 
      } 
     } 

     if (completedOrFailed) 
      [self callCompletionHandlerIfNecessary]; 
    }]; 
} 

- (void)cancel 
{ 
    dispatch_async(serializationQueue, ^{ 
     [self callCompletionHandlerIfNecessary]; 
    }); 
} 

- (void)callCompletionHandlerIfNecessary 
{ 
    // Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar 
    BOOL oldFinished = finished; 
    finished = YES; 

    if (oldFinished == NO) 
    { 
     [assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input 

     dispatch_block_t localCompletionHandler = [completionHandler retain]; 
     [completionHandler release]; 
     completionHandler = nil; 

     if (localCompletionHandler) 
     { 
      localCompletionHandler(); 
      [localCompletionHandler release]; 
     } 
    } 
} 

@end 
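
(For context, a minimal sketch of how the converter above is driven; the URLs are illustrative and the progress view is omitted:)

MCVideoConverter *converter = [[MCVideoConverter alloc] init]; 
NSURL *movURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"in.mov"]];  // illustrative 
NSURL *mp4URL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"out.mp4"]]; // illustrative 
[converter convertVideo:movURL outputURL:mp4URL progress:nil]; 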

Answers

Answer 1 (score 2):

Hey, it took a long time, but I ended up with a good solution; it may help someone else in the future.

My code:

-(void) compressVideo 
{ 
    asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil]; 
    exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough]; 

    NSLog(@"compatible presets: %@", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]); 
    NSLog(@"supported file types: %@", exportSession.supportedFileTypes); 

    NSLog(@"----------------------------------------- convert to mp4"); 

    exportSession.outputFileType = AVFileTypeMPEG4; 
    exportSession.outputURL = [self outputVideoPath:@"outPut" ext:@"mp4"]; 

    [exportSession exportAsynchronouslyWithCompletionHandler:^{ 
        // ICQLog is the poster's own logging macro. 
        ICQLog(@" exportSession.status = %d exportSession.error = %@", exportSession.status, exportSession.error); 

        if (exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted)) 
        { 
            ICQLog(@" exportSession.outputURL = %@", exportSession.outputURL); 

            // Remove the temporary source file and keep the exported one. 
            [[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL]; 
            [videoUrl release]; 
            videoUrl = [exportSession.outputURL retain]; 
        } 
        else 
        { 
            //TODO - report error 
        } 

        [exportSession release], exportSession = nil; 
        [asset release], asset = nil; 
    }]; 
} 
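
(The outputVideoPath:ext: helper isn't shown in the answer; a minimal sketch of what such a helper might look like, as an assumption rather than the poster's code:)

// Hypothetical helper: builds a unique file URL in the temporary directory. 
- (NSURL *)outputVideoPath:(NSString *)name ext:(NSString *)ext 
{ 
    NSString *fileName = [NSString stringWithFormat:@"%@-%@.%@", name, 
              [[NSProcessInfo processInfo] globallyUniqueString], ext]; 
    return [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:fileName]]; 
} 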
Comment (+0): I tried this, and AVFoundation complained that my ProRes video was not compatible with mp4. Do you know a different way to do this? – Eric 2015-01-26 02:53:28

Answer 2 (score 1):

I can't help with the trans-wrapping itself; I haven't dug into that.

Is your main priority getting the output file as .mp4 without having to re-process it? If so, just use .mp4 as the file extension of the movie clip your code writes out, and it should work fine. I used this approach today and it works. I didn't have to convert it from .mov to .mp4, because essentially an .mp4 file is the same as a .mov file with some additional standards-based features.

Hope this helps.
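
(If you take this route, a sketch of renaming the picked file; the paths are illustrative:)

// Rename the recorded .mov to .mp4 without re-processing its contents. 
NSString *movPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mov"]; // illustrative 
NSString *mp4Path = [[movPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mp4"]; 
NSError *error = nil; 
if (![[NSFileManager defaultManager] moveItemAtPath:movPath toPath:mp4Path error:&error]) 
    NSLog(@"rename failed: %@", error); 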

Comment (+1): You can do that, but they won't play on Android devices. – poetmountain 2013-02-06 07:19:33

Comment (+0): Hey, you can see my code below: it compresses and re-wraps to MP4 format, and it plays on Android devices. – 2014-08-20 13:29:27

Answer 3 (score 0):

Here is the code I use.

- (BOOL)encodeVideo:(NSURL *)videoURL 
{ 
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil]; 

    // Create the composition and tracks 
    AVMutableComposition *composition = [AVMutableComposition composition]; 
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
    NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo]; 
    if (assetVideoTracks.count <= 0) 
    { 
         NSLog(@"Error reading the transformed video track"); 
         return NO; 
    } 
    
    // Insert the tracks in the composition's tracks 
    AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject]; 
    [videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil]; 
    [videoTrack setPreferredTransform:assetVideoTrack.preferredTransform]; 
    
    // Note: this assumes the asset has an audio track; a count check like the 
    // video-track check above would make it safer. 
    AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 
    [audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil]; 
    
    // Export to mp4. MGPublic and BSCommon are the poster's own helper classes 
    // (iOS version check and UUID generation, respectively). 
    NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough; 
    NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4", 
               [NSHomeDirectory() stringByAppendingString:@"/tmp"], 
               [BSCommon uuidString]]; 
    
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath]; 
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality]; 
    exportSession.outputURL = exportUrl; 
    CMTime start = kCMTimeZero; // a timescale of 0 would make CMTimeMakeWithSeconds return an invalid time 
    CMTimeRange range = CMTimeRangeMake(start, [asset duration]); 
    exportSession.timeRange = range; 
    exportSession.outputFileType = AVFileTypeMPEG4; 
    [exportSession exportAsynchronouslyWithCompletionHandler:^{ 
         switch ([exportSession status]) 
         { 
         case AVAssetExportSessionStatusCompleted: 
           NSLog(@"MP4 Successful!"); 
           break; 
         case AVAssetExportSessionStatusFailed: 
           NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]); 
           break; 
         case AVAssetExportSessionStatusCancelled: 
           NSLog(@"Export canceled"); 
           break; 
         default: 
           break; 
         } 
    }]; 
    
    return YES; 
} 
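
Note that encodeVideo: returns YES as soon as the export has been started; the actual result is only known in the completion handler, so the return value only indicates that the tracks were set up, not that the export succeeded.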
