I have an array of AVURLAssets. It contains videos shot with the back camera, videos shot with the front camera (not mirrored), and still images turned into videos, all in portrait orientation.
I wrote a function to merge them into a single video for playback.
My issue is that the original presentation of each clip is not preserved in the final playback: each video ends up rotated, stretched/squished, or mirrored.
I've tried to follow existing SO posts, but nothing has worked.
The function below is where I've landed so far. Any guidance would be greatly appreciated.
import AVFoundation

func merge(assets: [AVURLAsset], completion: @escaping (URL?, AVAssetExportSession?) -> Void) {
    let mainComposition = AVMutableComposition()
    var lastTime = CMTime.zero

    // Bail out and report failure if the composition tracks can't be created.
    guard let videoCompositionTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
          let audioCompositionTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) else {
        completion(nil, nil)
        return
    }

    // One layer instruction spans the whole video track; transforms are keyed by time in the loop below.
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoCompositionTrack)
    // Append each asset's video (and audio, when present) to the end of the composition.
    for asset in assets {
        guard let videoTrack = asset.tracks(withMediaType: .video).first else { continue }

        // Carry the source track's orientation into the composition at this segment's start time.
        layerInstruction.setTransform(videoTrack.preferredTransform, at: lastTime)

        do {
            try videoCompositionTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: videoTrack, at: lastTime)
            if let audioTrack = asset.tracks(withMediaType: .audio).first {
                try audioCompositionTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: audioTrack, at: lastTime)
            }
        } catch {
            print("Failed to insert tracks: \(error)")
            completion(nil, nil)
            return
        }

        lastTime = CMTimeAdd(lastTime, asset.duration)
    }
    let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "test.mp4")
    // Remove any leftover file; the export fails if the output already exists.
    try? FileManager.default.removeItem(at: outputUrl)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.layerInstructions = [layerInstruction]
    instruction.timeRange = videoCompositionTrack.timeRange

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [instruction]
    videoComposition.frameDuration = videoCompositionTrack.minFrameDuration
    videoComposition.renderSize = CGSize(width: 720, height: 1280) // adjust to your target dimensions

    guard let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality) else {
        completion(nil, nil)
        return
    }
    exporter.outputURL = outputUrl
    exporter.outputFileType = .mp4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = videoComposition

    exporter.exportAsynchronously {
        if exporter.status == .completed, let exportedUrl = exporter.outputURL {
            completion(exportedUrl, exporter)
            // Play the exported file, not the in-memory composition, once the export has finished.
            play(video: AVAsset(url: exportedUrl))
        } else {
            print("Export failed: \(String(describing: exporter.error))")
            completion(nil, exporter)
        }
    }
}
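
From the answers I've read, I suspect the raw preferredTransform isn't enough on its own and each clip needs to be scaled and translated into the render size as well. Below is a sketch of my understanding of that approach (the helper name and the aspect-fit math are my own, so treat it as untested): it applies the preferredTransform to the track's naturalSize to find the oriented frame, then aspect-fits and centers that frame in the render box.

// Sketch (untested): orient a source track upright and aspect-fit it
// into the composition's render size.
func portraitTransform(for track: AVAssetTrack, renderSize: CGSize) -> CGAffineTransform {
    let preferred = track.preferredTransform

    // Bounding box of the track after its preferred transform is applied.
    let oriented = CGRect(origin: .zero, size: track.naturalSize).applying(preferred)

    // Shift the oriented frame so it starts at the origin.
    let normalize = CGAffineTransform(translationX: -oriented.minX, y: -oriented.minY)

    // Aspect-fit the oriented frame into the render size.
    let scale = min(renderSize.width / oriented.width, renderSize.height / oriented.height)
    let fit = CGAffineTransform(scaleX: scale, y: scale)

    // Center whatever space is left over.
    let center = CGAffineTransform(translationX: (renderSize.width - oriented.width * scale) / 2,
                                   y: (renderSize.height - oriented.height * scale) / 2)

    return preferred.concatenating(normalize).concatenating(fit).concatenating(center)
}

In the loop, this would replace the direct call, i.e. layerInstruction.setTransform(portraitTransform(for: videoTrack, renderSize: CGSize(width: 720, height: 1280)), at: lastTime). Is something along these lines the right direction, or do I need a separate layer instruction per asset?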