Since this tutorial is quite old, I had to struggle a bit to get the app working in Swift 3. The rotation part also does not work at all if you follow the tutorial as written: you need to actually scale, translate, and rotate each video track to make footage shot in portrait come out right.
Below is the code I worked out for merging two videos taken in portrait.
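Everything below lives in the same view controller as in the original tutorial, so it assumes you already have firstAsset, secondAsset, and audioAsset (AVAsset?) properties, a spinner outlet (UIActivityIndicatorView), and the presentAlert/dismissAlertWithTitleAndMessage helpers, plus import AVFoundation at the top of the file.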
func assetIsPortrait(assetTrack: AVAssetTrack) -> Bool {
    // Portrait footage is stored in a landscape pixel buffer with the
    // rotation recorded in preferredTransform, so inspect the matrix
    let trackTransform: CGAffineTransform = assetTrack.preferredTransform
    if trackTransform.a == 0 && trackTransform.b == 1.0 && trackTransform.c == -1.0 && trackTransform.d == 0 {
        return true   // rotated 90 degrees (portrait)
    }
    if trackTransform.a == 0 && trackTransform.b == -1.0 && trackTransform.c == 1.0 && trackTransform.d == 0 {
        return true   // rotated -90 degrees (portrait, upside down)
    }
    if trackTransform.a == 1.0 && trackTransform.b == 0 && trackTransform.c == 0 && trackTransform.d == 1.0 {
        return false  // identity (landscape)
    }
    if trackTransform.a == -1.0 && trackTransform.b == 0 && trackTransform.c == 0 && trackTransform.d == -1.0 {
        return false  // rotated 180 degrees (landscape, flipped)
    }
    return true // default case
}
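As an aside, if you'd rather not pattern-match the matrix, you can recover the angle directly. This is just a sketch under the assumption that preferredTransform is a pure rotation; trackRotationDegrees is my own name, not part of the tutorial:

func trackRotationDegrees(assetTrack: AVAssetTrack) -> CGFloat {
    // atan2 of the matrix's first column gives the rotation angle;
    // +/-90 means portrait, 0 or 180 means landscape
    let t = assetTrack.preferredTransform
    return atan2(t.b, t.a) * 180 / CGFloat.pi
}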
func renderSizeForTracks(assetTracks: [AVAssetTrack]) -> CGSize {
    var renderWidth: CGFloat = 0
    var renderHeight: CGFloat = 0
    for assetTrack: AVAssetTrack in assetTracks {
        if self.assetIsPortrait(assetTrack: assetTrack) {
            // naturalSize reports the landscape buffer, so swap the axes
            renderWidth = max(renderWidth, assetTrack.naturalSize.height)
            renderHeight = max(renderHeight, assetTrack.naturalSize.width)
        } else {
            renderWidth = max(renderWidth, assetTrack.naturalSize.width)
            renderHeight = max(renderHeight, assetTrack.naturalSize.height)
        }
    }
    return CGSize(width: renderWidth, height: renderHeight)
}
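For example, a 1080p clip shot in portrait on an iPhone reports a naturalSize of 1920×1080 (the pixel buffer is stored landscape, with the rotation in preferredTransform), so merging two such clips gives a render size of 1080×1920.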
func scaleFactorForAsset(assetTrack: AVAssetTrack) -> CGSize {
    if !self.assetIsPortrait(assetTrack: assetTrack) {
        // Landscape: leave the video alone if it's already bigger than the screen
        if assetTrack.naturalSize.width > UIScreen.main.nativeBounds.width && assetTrack.naturalSize.height > UIScreen.main.nativeBounds.height {
            return CGSize(width: 1.0, height: 1.0)
        }
        let widthRatio: CGFloat = UIScreen.main.bounds.width / assetTrack.naturalSize.width
        let heightRatio: CGFloat = UIScreen.main.bounds.height / assetTrack.naturalSize.height
        return CGSize(width: min(widthRatio, heightRatio), height: min(widthRatio, heightRatio))
    } else {
        // Portrait: compare against the screen dimensions swapped, since
        // naturalSize is still the landscape buffer size
        if assetTrack.naturalSize.width > UIScreen.main.nativeBounds.height && assetTrack.naturalSize.height > UIScreen.main.nativeBounds.width {
            return CGSize(width: 1.0, height: 1.0)
        }
        let widthRatio: CGFloat = UIScreen.main.nativeBounds.height / assetTrack.naturalSize.width
        let heightRatio: CGFloat = UIScreen.main.nativeBounds.width / assetTrack.naturalSize.height
        return CGSize(width: min(widthRatio, heightRatio), height: min(widthRatio, heightRatio))
    }
}
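One caveat: UIScreen.main.bounds is measured in points while nativeBounds is in pixels (and nativeBounds is always portrait-oriented), so the landscape branch above mixes the two units. This is what I worked out for my case, but if you adapt it you may want to pick one unit and use it consistently.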
@IBAction func mergeVideos(_ sender: UIButton) {
    if self.firstAsset != nil && self.secondAsset != nil {
        self.spinner.startAnimating()
        // Build a composition with the two video tracks laid end to end
        let mixComposition = AVMutableComposition()
        let firstTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
        let firstAssetTrack: AVAssetTrack = self.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0]
        try! firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.firstAsset!.duration), of: firstAssetTrack, at: kCMTimeZero)
        let secondTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
        let secondAssetTrack: AVAssetTrack = self.secondAsset!.tracks(withMediaType: AVMediaTypeVideo)[0]
        try! secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.secondAsset!.duration), of: secondAssetTrack, at: self.firstAsset!.duration)
        if self.audioAsset != nil {
            // Lay the optional audio track across the full combined duration
            let audioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try! audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(self.firstAsset!.duration, self.secondAsset!.duration)), of: self.audioAsset!.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        }
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(self.firstAsset!.duration, self.secondAsset!.duration))
        // First clip: scale, translate, then rotate so portrait footage ends up upright
        let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)
        var firstTransform: CGAffineTransform = self.firstAsset!.preferredTransform
        let firstScale: CGSize = self.scaleFactorForAsset(assetTrack: firstAssetTrack)
        let firstScaleTransform = CGAffineTransform(scaleX: firstScale.width, y: firstScale.height)
        if self.assetIsPortrait(assetTrack: firstAssetTrack) {
            // Shift right by the rotated clip's width, then rotate 90 degrees
            let translateTransform = firstScaleTransform.translatedBy(x: firstAssetTrack.naturalSize.height, y: 0)
            firstTransform = translateTransform.rotated(by: CGFloat.pi / 2)
        }
        firstLayerInstruction.setTransform(firstTransform, at: kCMTimeZero)
        // Hide the first clip when it ends so it doesn't cover the second
        firstLayerInstruction.setOpacity(0, at: self.firstAsset!.duration)
        let secondLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack)
        var secondTransform = self.secondAsset!.preferredTransform
        let secondScale: CGSize = self.scaleFactorForAsset(assetTrack: secondAssetTrack)
        let secondScaleTransform = CGAffineTransform(scaleX: secondScale.width, y: secondScale.height)
        if self.assetIsPortrait(assetTrack: secondAssetTrack) {
            let translateTransform = secondScaleTransform.translatedBy(x: secondAssetTrack.naturalSize.height, y: 0)
            secondTransform = translateTransform.rotated(by: CGFloat.pi / 2)
        }
        secondLayerInstruction.setTransform(secondTransform, at: kCMTimeZero)
        mainInstruction.layerInstructions = [firstLayerInstruction, secondLayerInstruction]
        let mainCompositionInstructions = AVMutableVideoComposition()
        mainCompositionInstructions.instructions = [mainInstruction]
        mainCompositionInstructions.frameDuration = CMTimeMake(1, 30)
        mainCompositionInstructions.renderSize = self.renderSizeForTracks(assetTracks: [firstAssetTrack, secondAssetTrack])
        // Export to a unique file in the temp directory; use a .mov extension
        // so it matches the QuickTime output file type (the original .mp4
        // extension didn't match the file type set below)
        let savePathUrl = URL(fileURLWithPath: NSTemporaryDirectory().appending("mergedVideo-\(arc4random() % 1000).mov"))
        let exporter: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
        exporter.outputURL = savePathUrl
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainCompositionInstructions
        exporter.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                self.exportDidFinish(session: exporter)
            }
        })
    } else {
        let alert: UIAlertController = self.dismissAlertWithTitleAndMessage(title: "Error", message: "Please select videos to merge first (audio is optional)")
        self.presentAlert(alert: alert)
    }
}
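For completeness, here is a minimal sketch of the exportDidFinish(session:) helper that the completion handler calls, assuming you save the result to the photo library the way the original tutorial does (add import Photos and request photo-library permission first):

func exportDidFinish(session: AVAssetExportSession) {
    self.spinner.stopAnimating()
    guard session.status == .completed, let outputURL = session.outputURL else {
        print("Export failed: \(String(describing: session.error))")
        return
    }
    PHPhotoLibrary.shared().performChanges({
        // Register the exported movie as a new video asset
        _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
    }, completionHandler: { success, error in
        DispatchQueue.main.async {
            let alert = self.dismissAlertWithTitleAndMessage(
                title: success ? "Success" : "Error",
                message: success ? "Video saved to the photo library" : "Failed to save video")
            self.presentAlert(alert: alert)
        }
    })
}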