//
// CameraViewController.swift
// Sezzwho
//
// Created by Mobdev125 on 5/25/17.
// Copyright © 2017 Mobdev125. All rights reserved.
//
import UIKit
import SwiftyCam
import Material
import Photos
import QuartzCore
import AVFoundation
import CoreMedia
/// Maximum total recording time in seconds.
let maxVideoTime = 30.0

class CameraViewController: SwiftyCamViewController {

    var flipCameraButton: UIButton!
    var flashButton: UIButton!
    var captureButton: SwiftyRecordButton!
    var closeButton: UIButton!
    var nextButton: UIButton!
    var retryButton: UIButton!
    var timeLabel: UILabel!

    /// Recorded clips waiting to be merged.
    var videoClips = [URL]()
    /// URL of the merged, exported video (if any).
    var finalVideoUrl: URL?
    /// Remaining recording time in seconds.
    var remainedTime = maxVideoTime
    var recordingTimer: Timer!
    override func viewDidLoad() {
        super.viewDidLoad()
        cameraDelegate = self
        maximumVideoDuration = remainedTime
        shouldUseDeviceOrientation = true
        videoQuality = .high
        addButtons()
        setTimeLabel()
        self.navigationController?.navigationBar.isHidden = true
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Disable the record button once the full recording time has been used up.
        captureButton.isEnabled = remainedTime > 0
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "gotoVideoVC", let videoVC = segue.destination as? VideoViewController {
            videoVC.videoURL = sender as? URL
        }
    }
    private func addButtons() {
        // Bottom bar holding the record, flip-camera and flash buttons.
        let bottomView = UIView(frame: CGRect(x: 0, y: view.frame.height - 125.0, width: view.frame.width, height: 125))
        bottomView.backgroundColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.4)
        self.view.addSubview(bottomView)

        captureButton = SwiftyRecordButton(frame: CGRect(x: view.frame.midX - 37.5, y: view.frame.height - 100.0, width: 75.0, height: 75.0))
        self.view.addSubview(captureButton)
        captureButton.delegate = self

        flipCameraButton = UIButton(frame: CGRect(x: ((view.frame.width / 2 - 37.5) / 2) - 15.0, y: view.frame.height - 74.0, width: 30.0, height: 23.0))
        flipCameraButton.setImage(#imageLiteral(resourceName: "CameraSwitch"), for: .normal)
        flipCameraButton.addTarget(self, action: #selector(cameraSwitchAction(_:)), for: .touchUpInside)
        self.view.addSubview(flipCameraButton)

        // Mirror the flip-camera button's horizontal position on the right-hand side.
        let flashButtonX = (view.frame.width - (view.frame.width / 2 + 37.5)) + ((view.frame.width / 2) - 37.5) - 9.0
        flashButton = UIButton(frame: CGRect(x: flashButtonX, y: view.frame.height - 77.5, width: 18.0, height: 30.0))
        flashButton.setImage(#imageLiteral(resourceName: "flashOutline"), for: .normal)
        flashButton.addTarget(self, action: #selector(toggleFlashAction(_:)), for: .touchUpInside)
        self.view.addSubview(flashButton)

        // Top bar with the close button, countdown label and next button.
        let topView = UIView(frame: CGRect(x: 0, y: 0, width: view.frame.width, height: 70))
        topView.backgroundColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.4)
        self.view.addSubview(topView)

        closeButton = UIButton(frame: CGRect(x: 20, y: 20, width: 30, height: 30))
        closeButton.setImage(Icon.close, for: .normal)
        closeButton.tintColor = UIColor.white
        closeButton.addTarget(self, action: #selector(closeAction(_:)), for: .touchUpInside)
        self.view.addSubview(closeButton)

        nextButton = UIButton(frame: CGRect(x: view.frame.width - 50, y: 20, width: 30, height: 30))
        nextButton.setImage(#imageLiteral(resourceName: "ic_arrow_next_white"), for: .normal)
        nextButton.addTarget(self, action: #selector(nextAction(_:)), for: .touchUpInside)
        self.view.addSubview(nextButton)

        timeLabel = UILabel(frame: CGRect(x: 60, y: 20, width: view.frame.width - 120, height: 30))
        timeLabel.textAlignment = .center
        timeLabel.textColor = .white
        timeLabel.text = "00:30"
        self.view.addSubview(timeLabel)

        retryButton = UIButton(frame: CGRect(x: view.frame.midX - 30, y: 80, width: 60, height: 30))
        retryButton.backgroundColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.4)
        retryButton.layer.cornerRadius = 4
        retryButton.setTitle("Retry!", for: .normal)
        retryButton.setTitleColor(.white, for: .normal)
        retryButton.tintColor = .white
        retryButton.addTarget(self, action: #selector(retryAction(_:)), for: .touchUpInside)
        self.view.addSubview(retryButton)
    }
}
// Actions
extension CameraViewController {

    @objc fileprivate func cameraSwitchAction(_ sender: Any) {
        switchCamera()
    }

    @objc fileprivate func toggleFlashAction(_ sender: Any) {
        flashEnabled = !flashEnabled
        if flashEnabled {
            flashButton.setImage(#imageLiteral(resourceName: "flash"), for: .normal)
        } else {
            flashButton.setImage(#imageLiteral(resourceName: "flashOutline"), for: .normal)
        }
    }

    @objc fileprivate func closeAction(_ sender: Any) {
        self.dismiss(animated: true, completion: nil)
    }

    @objc fileprivate func nextAction(_ sender: Any) {
        mergeVideos()
    }

    @objc fileprivate func retryAction(_ sender: Any) {
        // Throw away every recorded clip and start over with the full recording time.
        for url in videoClips {
            try? FileManager.default.removeItem(at: url)
        }
        if let url = finalVideoUrl {
            try? FileManager.default.removeItem(at: url)
            finalVideoUrl = nil
        }
        videoClips.removeAll()
        remainedTime = maxVideoTime
        setTimeLabel()
    }

    @objc fileprivate func countDownTime() {
        remainedTime -= 1
        if remainedTime < 0 {
            recordingTimer.invalidate()
            return
        }
        setTimeLabel()
    }

    func setTimeLabel() {
        timeLabel.text = String(format: "00:%02d", Int(remainedTime))
    }
    func exportDidFinish(_ session: AVAssetExportSession) {
        if session.status == .completed {
            guard let outputURL = session.outputURL else { return }
            // Save the merged video to the photo library, then hand it to the preview screen.
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
            }) { completed, error in
                if completed {
                    print("Video is saved!")
                    self.finalVideoUrl = outputURL
                    self.performSegue(withIdentifier: "gotoVideoVC", sender: outputURL)
                }
            }
        }
    }

    func mergeVideos() {
        if videoClips.isEmpty {
            return
        } else if videoClips.count == 1 {
            // Only one clip was recorded, so there is nothing to merge.
            self.performSegue(withIdentifier: "gotoVideoVC", sender: videoClips[0])
            return
        }

        // Append every clip to a single mutable composition track, back to back.
        let mixComposition = AVMutableComposition()
        let track = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        for index in 0..<videoClips.count {
            let avAsset = AVAsset(url: videoClips[index])
            do {
                try track.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration),
                                          of: avAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                          at: index == 0 ? kCMTimeZero : mixComposition.duration)
            } catch {
                print("Failed to load track: \(error)")
            }
        }
        track.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)

        // Export the composition to a new file and continue in exportDidFinish(_:).
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        guard let savePath = FileUtils.getSaveFilePath() else { return }
        exporter.outputURL = URL(fileURLWithPath: savePath.appending("/mergeVideo-\(date).mov"))
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.exportAsynchronously {
            DispatchQueue.main.async {
                print("export finished")
                self.exportDidFinish(exporter)
            }
        }
    }
}
extension CameraViewController: SwiftyCamViewControllerDelegate {

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didTake photo: UIImage) {
    }

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didBeginRecordingVideo camera: SwiftyCamViewController.CameraSelection) {
        print("Did Begin Recording")
        // Count the remaining time down once per second while recording.
        recordingTimer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(countDownTime), userInfo: nil, repeats: true)
        captureButton.growButton()
        UIView.animate(withDuration: 0.25, animations: {
            self.flashButton.alpha = 0.0
            self.flipCameraButton.alpha = 0.0
        })
    }

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishRecordingVideo camera: SwiftyCamViewController.CameraSelection) {
        recordingTimer.invalidate()
        // The next clip may only be as long as the time that is still left.
        maximumVideoDuration = remainedTime
        print("Did finish Recording")
        captureButton.shrinkButton()
        UIView.animate(withDuration: 0.25, animations: {
            self.flashButton.alpha = 1.0
            self.flipCameraButton.alpha = 1.0
        })
    }

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishProcessVideoAt url: URL) {
        videoClips.append(url)
        // Once the full recording time is used up, merge the clips automatically.
        if remainedTime <= 0 {
            nextAction(self)
        }
    }

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFocusAtPoint point: CGPoint) {
        let focusView = UIImageView(image: #imageLiteral(resourceName: "focus"))
        focusView.center = point
        focusView.alpha = 0.0
        view.addSubview(focusView)

        UIView.animate(withDuration: 0.25, delay: 0.0, options: .curveEaseInOut, animations: {
            focusView.alpha = 1.0
            focusView.transform = CGAffineTransform(scaleX: 1.25, y: 1.25)
        }, completion: { (success) in
            UIView.animate(withDuration: 0.15, delay: 0.5, options: .curveEaseInOut, animations: {
                focusView.alpha = 0.0
                focusView.transform = CGAffineTransform(translationX: 0.6, y: 0.6)
            }, completion: { (success) in
                focusView.removeFromSuperview()
            })
        })
    }

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didChangeZoomLevel zoom: CGFloat) {
        print(zoom)
    }

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didSwitchCameras camera: SwiftyCamViewController.CameraSelection) {
        print(camera)
    }
}
I used “SwiftyCam”. Above is the full source code. It records video with both the front and rear cameras and merges the clips into a single file. I hope this helps.

Kind regards,
Martin.
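P.S. If you want to drop this controller into your own app, here is a minimal sketch (not part of the project above) of how it could be presented. The storyboard name "Main", the storyboard ID "CameraViewController", and the HomeViewController type are assumptions for illustration only; the Info.plist usage keys are required by iOS for the camera, microphone, and photo-library access this screen performs.

import UIKit

// Hypothetical presenter screen (an assumption, not part of the original source).
final class HomeViewController: UIViewController {

    @IBAction func recordTapped(_ sender: Any) {
        // Assumes CameraViewController has the storyboard ID "CameraViewController" in Main.storyboard.
        let storyboard = UIStoryboard(name: "Main", bundle: nil)
        guard let camera = storyboard.instantiateViewController(withIdentifier: "CameraViewController") as? CameraViewController else {
            return
        }
        // The camera screen hides its own navigation bar and dismisses itself from closeAction(_:),
        // so presenting it modally and full screen matches how it is written.
        camera.modalPresentationStyle = .fullScreen
        present(camera, animated: true, completion: nil)
    }
}

// Info.plist entries the recording and saving flow needs (description strings are placeholders):
//   NSCameraUsageDescription       - "Records your video."
//   NSMicrophoneUsageDescription   - "Records audio with your video."
//   NSPhotoLibraryUsageDescription - "Saves the merged video to your library."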