AVFoundation Unit Testing

Hello everybody! Can you suggest me, how can I test a number of my functionalities for merging audio and video tracks? I need to test functions like audio track reduction, which loops my audio track or trimmed it; audio and video merging etc.
So, test like
XCTAssertEqual(AVAsset.init(url:merger.audioTrackReduction()), AVAsset.init(url:URL(string: “file:/Users/Project.0_o.Path/merged377.m4a”))!) failed, but I got this merged file from this function later!
Please, explain to me, what I need to test and how to do it :slight_smile:

//
maybe problem in different url path, or file names? Therefore I need to get only tracks and compare them?

I debugged my code and found out that when I run test audio file does not created, only empty file with name - no tracks in asset, nothing. But my code works and when I run it, audio files successfully created.But not in tests.

func audioTrackReduction() → URL? {
var mergeAudioURL: URL?
let composition = AVMutableComposition()
let fileName = “merged(arc4random_uniform(1000)).m4a”

    let assetAudio = AVURLAsset(url: audioUrl)
    let assetVideo = AVURLAsset(url: videoUrl)
    let trackAudio = assetAudio.tracks(withMediaType: AVMediaType.audio)[0]
    let trackVideo = assetVideo.tracks(withMediaType: AVMediaType.video)[0]
    
    let inputAudioDuration = Double(CMTimeGetSeconds(trackAudio.timeRange.duration))
    let inputVideoDuration = Double(CMTimeGetSeconds(trackVideo.timeRange.duration))
    
    if inputAudioDuration == inputVideoDuration {
        let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: trackAudio.timeRange.duration)
        try! compositionAudioTrack.insertTimeRange(timeRange, of: trackAudio, at: composition.duration)
    } else if inputAudioDuration > inputVideoDuration {
        do {
            let asset = try assetByTrimming(timeOffStart: inputVideoDuration, track: trackAudio)
            let trackAudioTrimmed = asset.tracks(withMediaType: AVMediaType.audio)[0]
            
            let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!
            let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: trackAudioTrimmed.timeRange.duration)
            try! compositionAudioTrack.insertTimeRange(timeRange, of: trackAudioTrimmed, at: composition.duration)
        } catch let error {
            print(error.localizedDescription)
        }
    } else {            
        
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: trackAudio.timeRange.duration)
        for _ in 0..<Int((inputVideoDuration / inputAudioDuration).rounded(.down)) {
            let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!
            try! compositionAudioTrack.insertTimeRange(timeRange, of: trackAudio, at: composition.duration)
        }
        let trimmedDuration = inputVideoDuration - Double(CMTimeGetSeconds(composition.duration))
        do {
            let asset = try assetByTrimming(timeOffStart: trimmedDuration, track: trackAudio)
            let trackAudioTrimmed = asset.tracks(withMediaType: AVMediaType.audio)[0]
            
            let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!
            let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: trackAudioTrimmed.timeRange.duration)
            try! compositionAudioTrack.insertTimeRange(timeRange, of: trackAudioTrimmed, at: composition.duration)
        } catch let error {
            print(error.localizedDescription)
        }
    }
    let docsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    mergeAudioURL = docsURL.appendingPathComponent(fileName)! as URL
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileType.m4a
    assetExport?.outputURL = mergeAudioURL
    assetExport?.exportAsynchronously(completionHandler:
        {
            switch assetExport!.status
            {
            case AVAssetExportSessionStatus.failed:
                print("failed \(String(describing: assetExport?.error))")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(String(describing: assetExport?.error))")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(String(describing: assetExport?.error))")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(String(describing: assetExport?.error))")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(String(describing: assetExport?.error))")
            default:
                print("-----Merge audio exportation complete.\(String(describing: mergeAudioURL))")
            }
    })
    guard let url = mergeAudioURL else {
        return nil
    }
    do {
        let audioData = try Data(contentsOf: url)
        storage.insert(item: Record(audio: audioData,
                                    name: fileName,
            urlString: String(describing: url),
            duration: Double(CMTimeGetSeconds((AVURLAsset(url: url).duration)))))
    } catch {
        print(error.localizedDescription)
    }
    return mergeAudioURL
}

//Test code
override func setUp() {
super.setUp()
// Put setup code here. This method is called before the invocation of each test method in the class.
guard let url = merger.audioTrackReduction() else {return}
mergedUrl = url
}

func testMergerAudioTrackReduction() {
let gottenAsset = AVURLAsset.init(url: mergedUrl)
let expectedAsset = AVURLAsset.init(url:URL(string: “file:/Users/Nechaev/Documents/TestSmashingFlashing/merged377.m4a”)!)
XCTAssertEqual(gottenAsset, expectedAsset)
}

Gotten asset has nothing but url.

That line means the export is asynchronous, so the method returns the url before the export has completed. Somewhere you need to wait until the export is finished, before doing the comparison test. I’m not sure what the best way to do that is. Adding some more print() statements would help you see what order things are happening.

1 Like

Yes, you are almost right. I found out that I need to use XCTestExpectation for waiting async operations.
Now my test look like this:
func testMergerAudioTrackReduction() {

    let expectation = XCTestExpectation.init(description: "Example")
    
    let url = merger.audioTrackReduction {
        expectation.fulfill()
    }
    
    let gottenAsset = AVURLAsset.init(url: url)
    let expectedAsset = AVURLAsset.init(url:URL(string: "file:/file0_opath/merged377.m4a")!)
    
    wait(for: [expectation], timeout: 50)

    XCTAssertEqual(gottenAsset.track(withTrackID: 1), expectedAsset.track(withTrackID: 1))
}

And I get error:
XCTAssertEqual failed: (“Optional(<AVAssetTrack: 0x60400001fbe0, trackID = 1, mediaType = soun>)”) is not equal to (“Optional(<AVAssetTrack: 0x6000000161e0, trackID = 1, mediaType = soun>)”)

Unfortunately I have no more time for searching property to comparison - I will compare tracks duration - it works fine :slight_smile: But anyone will find any other property to comparison - let me know :wink:

This topic was automatically closed after 166 days. New replies are no longer allowed.