I have multiple AVAsset videos that I string together using time ranges in an AVComposition. Each video has an associated AVVideoComposition that uses the animationTool to add an overlay layer to the video. I want to string these AVVideoCompositions together in the same way as the AVComposition, so that the overlay layer associated with each AVAsset appears at that asset's start time and disappears at its end time in the combined composition. How can I do this?
I currently use this code to create the AVVideoComposition for an AVComposition that contains a single AVAsset:
// Create video composition
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = videoSize
videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayer: videoLayer,
    in: outputLayer
)

let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(
    start: .zero,
    duration: composition.duration
)

let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionTrack)
layerInstruction.setTransform(assetTrack.preferredTransform, at: .zero)
instruction.layerInstructions = [layerInstruction]
videoComposition.instructions = [instruction]
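For multiple videos, what I have in mind is to add every video's overlay layer to a single outputLayer and show/hide each one with a timed opacity animation, roughly like the sketch below. overlayLayers and segmentRanges are made-up names for per-video data I would collect while building the composition, and videoComposition/videoSize are the same as in the code above. I haven't verified that this is the right approach:

// One parent layer for the whole timeline; each video's overlay should only
// be visible during that video's segment.
let outputLayer = CALayer()
outputLayer.frame = CGRect(origin: .zero, size: videoSize)

let videoLayer = CALayer()
videoLayer.frame = outputLayer.frame
outputLayer.addSublayer(videoLayer)

for (overlayLayer, segmentRange) in zip(overlayLayers, segmentRanges) {
    overlayLayer.frame = outputLayer.frame
    overlayLayer.opacity = 0 // hidden outside its own segment

    // Hold opacity at 1 only for the duration of this video's segment.
    // AVCoreAnimationBeginTimeAtZero marks the start of the video timeline,
    // since Core Animation treats a plain beginTime of 0 as "now".
    let show = CABasicAnimation(keyPath: "opacity")
    show.fromValue = 1
    show.toValue = 1
    show.beginTime = AVCoreAnimationBeginTimeAtZero + CMTimeGetSeconds(segmentRange.start)
    show.duration = CMTimeGetSeconds(segmentRange.duration)
    show.isRemovedOnCompletion = false
    overlayLayer.add(show, forKey: "showDuringSegment")

    outputLayer.addSublayer(overlayLayer)
}

videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayer: videoLayer,
    in: outputLayer
)

I'm not sure whether timing the layers with beginTime offsets like this is the correct way to line them up with the time ranges in the composition, or whether there is a better mechanism.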
This is my code for creating an AVComposition from multiple assets:
static func createFullVideo(from videos: [VideoItem]) -> AVComposition? {
    let newComposition = AVMutableComposition()

    guard let compositionTrack = newComposition.addMutableTrack(
        withMediaType: .video,
        preferredTrackID: kCMPersistentTrackID_Invalid) else {
        return nil
    }

    guard let compositionAudioTrack = newComposition.addMutableTrack(
        withMediaType: .audio,
        preferredTrackID: kCMPersistentTrackID_Invalid) else {
        return nil
    }

    var endTimeOfPreviousTrack: CMTime = .zero

    for video in videos {
        let composition = video.composition

        guard let assetTrack = composition.tracks(withMediaType: .video).first else {
            return nil
        }

        compositionTrack.preferredTransform = assetTrack.preferredTransform

        do {
            // Insert time range for video track in composition
            let timeRange = assetTrack.timeRange
            try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: endTimeOfPreviousTrack)

            // Get the audio track from the asset
            guard let audioAssetTrack = composition.tracks(withMediaType: .audio).first else {
                return nil
            }

            // Insert time range for audio track in composition
            try compositionAudioTrack.insertTimeRange(
                timeRange,
                of: audioAssetTrack,
                at: endTimeOfPreviousTrack
            )

            // Store end time of track
            endTimeOfPreviousTrack = CMTimeAdd(endTimeOfPreviousTrack, assetTrack.timeRange.duration)
        } catch {
            return nil
        }
    }

    return newComposition
}
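Building on that, I assume the combined AVVideoComposition would then need one instruction per video, each covering only that video's time range in the full composition, rather than the single instruction I use today. Something like this rough, untested sketch, where compositionTrack is the video track created in createFullVideo and videoSize is the same render size as above:

let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = videoSize
videoComposition.frameDuration = CMTime(value: 1, timescale: 30)

var instructions: [AVMutableVideoCompositionInstruction] = []
var cursor: CMTime = .zero

for video in videos {
    guard let assetTrack = video.composition.tracks(withMediaType: .video).first else { continue }
    let segmentRange = CMTimeRange(start: cursor, duration: assetTrack.timeRange.duration)

    // One instruction per video, covering only that video's slice of the timeline
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = segmentRange

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionTrack)
    layerInstruction.setTransform(assetTrack.preferredTransform, at: segmentRange.start)
    instruction.layerInstructions = [layerInstruction]
    instructions.append(instruction)

    cursor = CMTimeAdd(cursor, assetTrack.timeRange.duration)
}

videoComposition.instructions = instructions

I'm not sure whether splitting the instructions per segment like this is the right way to merge the per-video AVVideoCompositions, or how each video's overlay layer should then be attached so that it only appears during its own segment.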