2
我正在合併視頻, 這是我的代碼,它正在合併視頻,但合併的視頻不可聽(我正在合併合適的視頻,視頻,合併視頻是沉默)任何人都可以幫助我:正確合併視頻,但合併的視頻無聲
NSValue *timeDur;
NSMutableArray *arrInstructions = [[NSMutableArray alloc]init] ;
NSMutableArray *arrDuration = [[NSMutableArray alloc]init] ;
AVAsset *fAsset ;
CMTime eachDuration = kCMTimeZero ;
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
for (NSInteger counter = 0; counter < self.arrVideoUrls.count; counter++) {
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
if (counter>0)
fAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.arrVideoUrls[counter-1][@"VideoUrl"]]];
AVAsset *firstAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.arrVideoUrls[counter][@"VideoUrl"]]];
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero,firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:(counter == 0)?kCMTimeZero:eachDuration error:nil];
eachDuration = CMTimeAdd(eachDuration, firstAsset.duration);
timeDur = [NSValue valueWithCMTime:firstAsset.duration];
[arrDuration addObject:timeDur];
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 640.0/640.0;
//CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if (counter == 0) {
if(isFirstAssetPortrait_){
FirstAssetScaleToFitRatio = SYSTEM_SCREEN_SIZE.width/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:kCMTimeZero];
}
}else{
if(isFirstAssetPortrait_){
FirstAssetScaleToFitRatio = SYSTEM_SCREEN_SIZE.width/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:firstAsset.duration];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:firstAsset.duration];
}
}
if (counter <self.arrVideoUrls.count - 1) {
[FirstlayerInstruction setOpacity:0.0 atTime:eachDuration];
}
[arrInstructions addObject:FirstlayerInstruction];
}
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
CMTime final;
for (NSInteger counter = 0; counter < arrDuration.count; counter++) {
NSValue *value = arrDuration[counter];
CMTime timing = kCMTimeZero ;
[value getValue:&timing];
if (counter == 0) {
final = timing ;
}else
final = CMTimeAdd(final, timing);
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,final);
MainInstruction.layerInstructions = [arrInstructions copy] ;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 10);
NSLog(@"%f",CMTimeGetSeconds(MainCompositionInst.frameDuration));
MainCompositionInst.renderSize = CGSizeMake(SYSTEM_SCREEN_SIZE.width, SYSTEM_SCREEN_SIZE.height);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1920x1080];
//AVAssetExportPreset960x540
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
//exporter.shouldOptimizeForNetworkUse = YES;
//exporter.audioMix = audioZeroMix ;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"%@",exporter.error);
Hide_Indicator ;
[self exportDidFinish:exporter];
});
}];
謝謝:-)桑傑先生 –