
I used this as a reference: Concatenate two audio files in Swift and play them

I'm trying to create an alarm clock. Due to Apple's restrictive policy you cannot execute code in the background for more than about 10 minutes, and if I opt out of background modes and leave my app in the foreground, the sound is suspended as soon as the user presses the home button. I need the sound to keep playing until the user does something. UILocalNotifications don't work because of the silent and Do Not Disturb switches. So I want to play blank audio until the alarm time and then play the alarm sound.
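Something like this is what I mean by running blank audio (a rough sketch, not my actual code; "silence.m4a" is a placeholder file and the "audio" background mode must be enabled in Info.plist):

// Keep the audio session active and loop a silent file until the alarm time.
AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: nil)
AVAudioSession.sharedInstance().setActive(true, error: nil)

let silenceURL = NSBundle.mainBundle().URLForResource("silence", withExtension: "m4a")!
let silencePlayer = AVAudioPlayer(contentsOfURL: silenceURL, error: nil)
silencePlayer.numberOfLoops = -1   // loop indefinitely until the alarm fires
silencePlayer.prepareToPlay()
silencePlayer.play()
// (in the real app the player would be held as an instance property so it isn't deallocated)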

So I used the following to run it:

This is what happens when the save button is clicked (the user has already selected an alarm time):

let seconds = Double(comp.second)
let notification = UILocalNotification()
notification.alertBody = "testBody"
notification.fireDate = dueDatePicker.date
notification.alertTitle = "testTitle"
println("seconds:\(seconds)")

var results: NSArray = managedObjectContext!.executeFetchRequest(request, error: &error)!

let audioURL1 = NSBundle.mainBundle().URLForResource("alarm", withExtension: "m4a")!
let audioURL2 = NSBundle.mainBundle().URLForResource("music", withExtension: "mp3")!
println(audioURL1)
println(audioURL2)
println(task.uuid)

mergeAudio2(audioURL1, audioURL2: audioURL2, time: seconds, uuid: task.uuid)

and this is the merge audio portion:

func mergeAudio2(audioURL: NSURL, audioURL2: NSURL, time: Double, uuid: String) {
    var error: NSError?

    var ok1 = false
    var ok2 = false

    //var documentsDirectory:String = paths[0] as! String

    // Create an AVMutableComposition object. This object will hold our multiple AVMutableCompositionTracks.
    var composition = AVMutableComposition()
    var compositionAudioTrack1: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    var compositionAudioTrack2: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

    // Create a new file to receive the data
    var documentDirectoryURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first! as! NSURL
    var fileDestinationUrl = documentDirectoryURL.URLByAppendingPathComponent("resultmerge.wav")
    println(fileDestinationUrl)

    var file = "resultmerge.m4a"
    var dirs: [String] = (NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.AllDomainsMask, true) as? [String])!
    var dir = dirs[0] // documents directory
    var path = dir.stringByAppendingPathComponent(file)
    var pathURLarray: Array = (NSURL(fileURLWithPath: path)!).pathComponents!
    var pathURL: String = ""
    var final = ""
    var debut = ""

    for i in 1...(pathURLarray.count - 1) {
        if i == pathURLarray.count - 1 {
            final = ""
        } else {
            final = "/"
        }
        if i == 1 {
            debut = "/"
        } else {
            debut = ""
        }
        pathURL = debut + pathURL + (pathURLarray[i] as! String) + final
    }

    var checkValidation = NSFileManager.defaultManager()
    if checkValidation.fileExistsAtPath(pathURL) {
        println("file exist")
        if NSFileManager.defaultManager().removeItemAtURL(fileDestinationUrl, error: nil) {
            println("delete")
        }
    } else {
        println("no file")
    }

    var url1 = audioURL
    var url2 = audioURL2

    var avAsset1 = AVURLAsset(URL: url1, options: nil)
    var avAsset2 = AVURLAsset(URL: url2, options: nil)

    var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    var tracks2 = avAsset2.tracksWithMediaType(AVMediaTypeAudio)

    var assetTrack1: AVAssetTrack = tracks1[0] as! AVAssetTrack
    var assetTrack2: AVAssetTrack = tracks2[0] as! AVAssetTrack

    var duration1: CMTime = assetTrack1.timeRange.duration
    var duration2: CMTime = assetTrack2.timeRange.duration

    var timeRange1 = CMTimeRangeMake(kCMTimeZero, duration1)
    var timeRange2 = CMTimeRangeMake(duration1, duration2)

    ok1 = compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero, error: nil)
    if ok1 {
        ok2 = compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1, error: nil)
        if ok2 {
            println("success")
        }
    }

    // AVAssetExportPresetPassthrough => concatenation
    var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = fileDestinationUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status {
        case AVAssetExportSessionStatus.Failed:
            println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(assetExport.error)")
        default:
            println("complete")
            var audioPlayer = AVAudioPlayer()
            AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: nil)
            //audioPlayer.delegate = self

            audioPlayer = AVAudioPlayer(contentsOfURL: fileDestinationUrl, error: nil)
            println(fileDestinationUrl)
            audioPlayer.prepareToPlay()
            audioPlayer.play()
        }
    })
}

The app loads up and everything runs fine. I get the printlns of the file URLs, and the "no file", "success", and "complete" printouts, meaning it all executes. But no sound plays.

Kashish Goel

1 Answer


But no sound plays

Because audioPlayer is a local variable and dies before it has a chance to play anything. Promote it to be an instance variable so that it persists.
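A minimal sketch of that change, in the same Swift 1.x style as the question (the class and playMergedFile names are just placeholders; the point is that audioPlayer is a property, not a local):

import UIKit
import AVFoundation

class AlarmViewController: UIViewController {
    // Promoted from a local to an instance variable so the player
    // outlives the export session's completion handler.
    var audioPlayer: AVAudioPlayer?

    func playMergedFile(fileDestinationUrl: NSURL) {
        AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: nil)
        audioPlayer = AVAudioPlayer(contentsOfURL: fileDestinationUrl, error: nil)
        audioPlayer?.prepareToPlay()
        audioPlayer?.play()
    }
}

Call playMergedFile(fileDestinationUrl) from inside the export completion handler instead of creating the player there; because self now owns the player, playback continues after the handler returns.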

matt