I used the code below to add an image overlay to a video and export the newly generated video to the documents directory. Strangely, though, the exported video comes out rotated 90 degrees.
- (void)buildTransitionComposition:(AVMutableComposition *)composition andVideoComposition:(AVMutableVideoComposition *)videoComposition
{
    CMTime nextClipStartTime = kCMTimeZero;
    NSInteger i;

    // Make transitionDuration no greater than half the shortest clip duration.
    CMTime transitionDuration = self.transitionDuration;
    for (i = 0; i < [_clips count]; i++) {
        NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
        if (clipTimeRange) {
            CMTime halfClipDuration = [clipTimeRange CMTimeRangeValue].duration;
            halfClipDuration.timescale *= 2; // You can halve a rational by doubling its denominator.
            transitionDuration = CMTimeMinimum(transitionDuration, halfClipDuration);
        }
    }

    // Add two video tracks and two audio tracks.
    AVMutableCompositionTrack *compositionVideoTracks[2];
    AVMutableCompositionTrack *compositionAudioTracks[2];
    compositionVideoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionVideoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionAudioTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionAudioTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);
    CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);

    // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
    for (i = 0; i < [_clips count]; i++) {
        NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
        AVURLAsset *asset = [_clips objectAtIndex:i];
        NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
        CMTimeRange timeRangeInAsset;
        if (clipTimeRange)
            timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
        else
            timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

        AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
        /*
        CGAffineTransform t = clipVideoTrack.preferredTransform;
        NSLog(@"Transform1 : %@",t);
        */
        AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];

        // Remember the time range in which this clip should pass through.
        // Every clip after the first begins with a transition.
        // Every clip before the last ends with a transition.
        // Exclude those transitions from the pass through time ranges.
        passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
        if (i > 0) {
            passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
            passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
        }
        if (i+1 < [_clips count]) {
            passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
        }

        // The end of this clip will overlap the start of the next by transitionDuration.
        // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
        nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);

        // Remember the time range for the transition to the next item.
        transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
    }

    // Set up the video composition if we are to perform crossfade or Push transitions between clips.
    NSMutableArray *instructions = [NSMutableArray array];

    // Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
    for (i = 0; i < [_clips count]; i++) {
        NSInteger alternatingIndex = i % 2; // alternating targets
        // Pass through clip i.
        AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        passThroughInstruction.timeRange = passThroughTimeRanges[i];
        AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
        /*
        CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
        CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);
        [passThroughLayer setTransform:rotateTranslate atTime:kCMTimeZero];
        */
        passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
        [instructions addObject:passThroughInstruction];

        if (i+1 < [_clips count]) {
            // Add transition from clip i to clip i+1.
            AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            transitionInstruction.timeRange = transitionTimeRanges[i];
            AVMutableVideoCompositionLayerInstruction *fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
            AVMutableVideoCompositionLayerInstruction *toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];
            if (self.transitionType == SimpleEditorTransitionTypeCrossFade) {
                // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
                [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
            }
            else if (self.transitionType == SimpleEditorTransitionTypePush) {
                // Set a transform ramp on fromLayer from identity to all the way left of the screen.
                [fromLayer setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeTranslation(-composition.naturalSize.width, 0.0) timeRange:transitionTimeRanges[i]];
                // Set a transform ramp on toLayer from all the way right of the screen to identity.
                [toLayer setTransformRampFromStartTransform:CGAffineTransformMakeTranslation(+composition.naturalSize.width, 0.0) toEndTransform:CGAffineTransformIdentity timeRange:transitionTimeRanges[i]];
            }
            transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
            [instructions addObject:transitionInstruction];
        }
    }

    videoComposition.instructions = instructions;
}
I can't export portrait videos in the correct orientation. Any help would be appreciated. Thank you.
By default, when you export a video with AVAssetExportSession, it comes out rotated away from its original orientation. To get the exact orientation you have to apply the track's transform. Try the code below, which does exactly that:
- (AVMutableVideoCompositionLayerInstruction *)layerInstructionAfterFixingOrientationForAsset:(AVAsset *)inAsset
                                                                                     forTrack:(AVMutableCompositionTrack *)inTrack
                                                                                       atTime:(CMTime)inTime
{
    // FIXING ORIENTATION //
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:inTrack];
    AVAssetTrack *videoAssetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
    BOOL isVideoAssetPortrait_ = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;

    // Inspect the preferred transform to work out how the video was captured.
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationRight;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationLeft;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
        videoAssetOrientation_ = UIImageOrientationUp;
    }
    if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
        videoAssetOrientation_ = UIImageOrientationDown;
    }

    CGFloat FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.width;
    if (isVideoAssetPortrait_) {
        FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.height;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [videolayerInstruction setTransform:CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
    } else {
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [videolayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
    }
    [videolayerInstruction setOpacity:0.0 atTime:inTime];

    return videolayerInstruction;
}
Hope this helps.
AVAssetTrack *assetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *mutableTrack = [mergeComposition mutableTrackCompatibleWithTrack:assetTrack];
AVMutableVideoCompositionLayerInstruction *assetInstruction = [self layerInstructionAfterFixingOrientationForAsset:inAsset forTrack:myLocalVideoTrack atTime:videoTotalDuration];
The above is the code that calls the method, where inAsset is your video asset, videoTotalDuration is the total duration of the video as a CMTime, and mergeComposition is an object of the AVMutableComposition class.
EDIT: This is not a callback method or an event; you have to call it yourself with the required parameters, as shown above.
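For reference, the returned layer instruction does nothing on its own; it has to be attached to an AVMutableVideoComposition that you then hand to the export session. Below is a minimal wiring sketch (an editor's illustration, not part of the original answer) reusing the names from the snippet above; the frame duration and render size are placeholders:

// Minimal wiring sketch. mergeComposition and assetInstruction are the names
// used above; the frame duration and render size below are placeholder values.
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mergeComposition.duration);
mainInstruction.layerInstructions = [NSArray arrayWithObject:assetInstruction];

AVMutableVideoComposition *mainVideoComposition = [AVMutableVideoComposition videoComposition];
mainVideoComposition.instructions = [NSArray arrayWithObject:mainInstruction];
mainVideoComposition.frameDuration = CMTimeMake(1, 30);          // placeholder: 30 fps
mainVideoComposition.renderSize = CGSizeMake(320.0, 480.0);       // placeholder size

// Hand it to the export session along with the composition:
// exportSession.videoComposition = mainVideoComposition;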
If you just want to keep the original rotation, here is a slightly simpler way:
// Grab the source track from AVURLAsset for example.
AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Grab the composition video track from AVMutableComposition you already made.
AVMutableCompositionTrack *compositionVideoTrack = [composition tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Apply the original transform.
if (assetVideoTrack && compositionVideoTrack) {
    [compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}
// Export...
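The export step elided by the final comment would look roughly like the following. This is a sketch, assuming composition is the AVMutableComposition from above and outputURL is a file URL of your choosing:

// Sketch of the elided export step (outputURL is a destination you choose).
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"Export failed: %@", exportSession.error);
    }
}];

Note that this approach works precisely because no video composition is attached: once an AVVideoComposition is in play, the track's preferredTransform is ignored and the rotation has to be baked into layer instructions, as the other answers do.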
Use the method below to set the correct orientation on an AVMutableVideoComposition according to the video asset track's orientation:
- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = videoTrack.naturalSize;
    BOOL isPortrait_ = [self isVideoPortrait:asset];
    if (isPortrait_) {
        NSLog(@"video is portrait ");
        // Swap width and height so the render size matches the upright video.
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    composition.naturalSize = videoSize;
    videoComposition.renderSize = videoSize;
    // videoComposition.renderSize = videoTrack.naturalSize; //
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1 / videoTrack.nominalFrameRate, 600);

    AVMutableCompositionTrack *compositionVideoTrack;
    compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];

    AVMutableVideoCompositionLayerInstruction *layerInst;
    layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = [NSArray arrayWithObject:layerInst];
    videoComposition.instructions = [NSArray arrayWithObject:inst];

    return videoComposition;
}
- (BOOL)isVideoPortrait:(AVAsset *)asset
{
    BOOL isPortrait = FALSE;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // PortraitUpsideDown
        if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // LandscapeRight
        if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            isPortrait = NO;
        }
        // LandscapeLeft
        if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            isPortrait = NO;
        }
    }
    return isPortrait;
}
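One caveat: the exact floating-point comparisons above only recognize the four clean capture transforms, and they all fail if the preferred transform also carries a scale factor. As an alternative (my own sketch, not part of the original answer), you can classify by the rotation angle instead:

// Sketch: classify orientation by rotation angle rather than exact matrix values.
- (BOOL)isVideoPortraitByAngle:(AVAsset *)asset
{
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] == 0) return NO;
    CGAffineTransform t = [(AVAssetTrack *)[tracks objectAtIndex:0] preferredTransform];
    // atan2(b, a) recovers the rotation component of the affine transform.
    CGFloat degrees = atan2(t.b, t.a) * 180.0 / M_PI;
    // A quarter turn in either direction means the frames are portrait.
    return fabs(fabs(degrees) - 90.0) < 1.0;
}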
Swift 2:
do {
    let path = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
    let documentsDirectory: AnyObject = path[0]
    // Change this to match your video file name dynamically.
    let dataPath = documentsDirectory.stringByAppendingPathComponent(videoFileName + ".MOV")
    let videoAsset = AVURLAsset(URL: NSURL(fileURLWithPath: dataPath), options: nil)
    let imgGenerator = AVAssetImageGenerator(asset: videoAsset)
    // Applying the preferred track transform gives the thumbnail the correct orientation.
    imgGenerator.appliesPreferredTrackTransform = true
    let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
    let uiImage = UIImage(CGImage: cgImage)
    videoThumb.image = uiImage
} catch let err as NSError {
    print("Error generating thumbnail: \(err)")
}
This is Swift 4 code that works perfectly for merging videos with the correct orientation:
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
var insertTime = kCMTimeZero

for videoAsset in arrayVideos {
    try! compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: insertTime)

    // Copy the source track's preferredTransform so each clip keeps its orientation.
    let assetVideoTrack = videoAsset.tracks(withMediaType: .video).last!
    let compositionVideoTrack = mainComposition.tracks(withMediaType: .video).last!
    compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform

    try! soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .audio)[0], at: insertTime)
    insertTime = CMTimeAdd(insertTime, videoAsset.duration)
}

let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge1uupo.MOV")
let fileManager = FileManager()
//fileManager.removeItemIfExisted(outputFileURL)

let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
    DispatchQueue.main.async {
        // Do what you want at the end
    }
}
A Swift version of the preferredTransform answer above.. this works for me:
let assetVideoTrack = (sourceAsset.tracksWithMediaType(AVMediaTypeVideo)).last as! AVAssetTrack
let compositionVideoTrack = (composition.tracksWithMediaType(AVMediaTypeVideo)).last as! AVMutableCompositionTrack
if (assetVideoTrack.playable && compositionVideoTrack.playable) {
    compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform
}