Saving Audio After Effect in iOS

Kaa*_*rak 7 ios swift avaudioengine avaudioplayernode

I'm developing an app that lets people record their voice, change it with effects, and share it through the app. I basically have everything else in place, and now it's time to ask for your help. This is my playback function, which plays the recorded audio file and applies the effects.

private func playAudio(pitch : Float, rate: Float, reverb: Float, echo: Float) {
        // Initialize variables
        audioEngine = AVAudioEngine()
        audioPlayerNode = AVAudioPlayerNode()
        audioEngine.attachNode(audioPlayerNode)

        // Setting the pitch
        let pitchEffect = AVAudioUnitTimePitch()
        pitchEffect.pitch = pitch
        audioEngine.attachNode(pitchEffect)

        // Setting the playback rate
        let playbackRateEffect = AVAudioUnitVarispeed()
        playbackRateEffect.rate = rate
        audioEngine.attachNode(playbackRateEffect)

        // Setting the reverb effect
        let reverbEffect = AVAudioUnitReverb()
        reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.Cathedral)
        reverbEffect.wetDryMix = reverb
        audioEngine.attachNode(reverbEffect)

        // Setting the echo effect on a specific interval
        let echoEffect = AVAudioUnitDelay()
        echoEffect.delayTime = NSTimeInterval(echo)
        audioEngine.attachNode(echoEffect)

        // Chain all these up, ending with the output
        audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: nil)
        audioEngine.connect(playbackRateEffect, to: pitchEffect, format: nil)
        audioEngine.connect(pitchEffect, to: reverbEffect, format: nil)
        audioEngine.connect(reverbEffect, to: echoEffect, format: nil)
        audioEngine.connect(echoEffect, to: audioEngine.outputNode, format: nil)

        audioPlayerNode.stop()

        let length = 4000
        let buffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0),frameCapacity:AVAudioFrameCount(length))
        buffer.frameLength = AVAudioFrameCount(length)

        try! audioEngine.start()


        let dirPaths: AnyObject = NSSearchPathForDirectoriesInDomains( NSSearchPathDirectory.DocumentDirectory,  NSSearchPathDomainMask.UserDomainMask, true)[0]
        let tmpFileUrl: NSURL = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound.m4a"))


        do{
            print(dirPaths)
            let settings = [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC), AVSampleRateKey: NSNumber(integer: 44100), AVNumberOfChannelsKey: NSNumber(integer: 2)]
            self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)

            audioEngine.outputNode.installTapOnBus(0, bufferSize: (AVAudioFrameCount(self.player!.duration)), format: self.audioPlayerNode.outputFormatForBus(0)){
                (buffer: AVAudioPCMBuffer!, time: AVAudioTime!)  in

                print(self.newAudio.length)
                print("=====================")
                print(self.audioFile.length)
                print("**************************")
                if (self.newAudio.length) < (self.audioFile.length){

                    do{
                        //print(buffer)
                        try self.newAudio.writeFromBuffer(buffer)
                    }catch _{
                        print("Problem Writing Buffer")
                    }
                }else{
                    self.audioPlayerNode.removeTapOnBus(0)
                }

            }
        }catch _{
            print("Problem")
        }

        audioPlayerNode.play()

    }

I guess the problem is that I call installTapOnBus on audioPlayerNode, while the effected audio is on audioEngine.outputNode. I tried installing the tap on audioEngine.outputNode instead, but it gives me an error. I also tried connecting the effects to audioEngine.mixerNode, but that wasn't the solution either. So, do you have any experience with saving effected audio files? How can I get this effected audio?

Any help is appreciated,

Thanks
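The key point in the accepted answer below is to route the effect chain into audioEngine.mainMixerNode and install the tap there, downstream of every effect, instead of on the player or output node. A minimal sketch of just that idea, using the same Swift 2-era API names as the code above and assuming newAudio is an AVAudioFile already opened for writing:

    // Sketch: tap a node that sits after the whole effect chain.
    audioEngine.connect(echoEffect, to: audioEngine.mainMixerNode, format: nil)

    let mixer = audioEngine.mainMixerNode
    mixer.installTapOnBus(0, bufferSize: 1024, format: mixer.inputFormatForBus(0)) {
        (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) in
        // Buffers arriving here already carry the pitch/rate/reverb/echo processing,
        // so writing them out captures the effected audio.
        do {
            try self.newAudio.writeFromBuffer(buffer)
        } catch {
            print("Problem writing buffer")
        }
    }

This is essentially what the answer's playAndRecord function does, plus a length check so the tap removes itself once the whole file has been written.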

Kaa*_*rak 5

Here is the solution to my question:

func playAndRecord(pitch : Float, rate: Float, reverb: Float, echo: Float) {
    // Initialize variables

// These are global variables; if you prefer, you can simply declare them locally here instead (let audioEngine = ..., etc.)
    audioEngine = AVAudioEngine()
    audioPlayerNode = AVAudioPlayerNode()
    audioEngine.attachNode(audioPlayerNode)
    playerB = AVAudioPlayerNode()

    audioEngine.attachNode(playerB)

    // Setting the pitch
    let pitchEffect = AVAudioUnitTimePitch()
    pitchEffect.pitch = pitch
    audioEngine.attachNode(pitchEffect)

    // Setting the playback rate
    let playbackRateEffect = AVAudioUnitVarispeed()
    playbackRateEffect.rate = rate
    audioEngine.attachNode(playbackRateEffect)

    // Setting the reverb effect
    let reverbEffect = AVAudioUnitReverb()
    reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.Cathedral)
    reverbEffect.wetDryMix = reverb
    audioEngine.attachNode(reverbEffect)

    // Setting the echo effect on a specific interval
    let echoEffect = AVAudioUnitDelay()
    echoEffect.delayTime = NSTimeInterval(echo)
    audioEngine.attachNode(echoEffect)

    // Chain all these up, ending with the output
    audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: nil)
    audioEngine.connect(playbackRateEffect, to: pitchEffect, format: nil)
    audioEngine.connect(pitchEffect, to: reverbEffect, format: nil)
    audioEngine.connect(reverbEffect, to: echoEffect, format: nil)
    audioEngine.connect(echoEffect, to: audioEngine.mainMixerNode, format: nil)


    // Good practice to stop before starting
    audioPlayerNode.stop()

    // Stop the previous AVAudioPlayer if it is still playing
    // (player is also a global AVAudioPlayer variable)
    if (player != nil) {
        player?.stop()
    }

    // audioFile here is our original audio
    audioPlayerNode.scheduleFile(audioFile, atTime: nil, completionHandler: {
        print("Complete")
    })


    try! audioEngine.start()


    let dirPaths: AnyObject = NSSearchPathForDirectoriesInDomains( NSSearchPathDirectory.DocumentDirectory,  NSSearchPathDomainMask.UserDomainMask, true)[0]
    let tmpFileUrl: NSURL = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound2.m4a"))

    // Save tmpFileUrl into a global variable so we don't lose it (only needed if you want to use the file later)
    filteredOutputURL = tmpFileUrl

    do{
        print(dirPaths)

        self.newAudio = try! AVAudioFile(forWriting: tmpFileUrl, settings:  [
            AVFormatIDKey: NSNumber(unsignedInt:kAudioFormatAppleLossless),
            AVEncoderAudioQualityKey : AVAudioQuality.Low.rawValue,
            AVEncoderBitRateKey : 320000,
            AVNumberOfChannelsKey: 2,
            AVSampleRateKey : 44100.0
            ])

        let length = self.audioFile.length


        audioEngine.mainMixerNode.installTapOnBus(0, bufferSize: 1024, format: self.audioEngine.mainMixerNode.inputFormatForBus(0)) {
            (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in


            print(self.newAudio.length)
            print("=====================")
            print(length)
            print("**************************")

            if (self.newAudio.length) < length { // keep writing until we've written as many frames as the source file, otherwise we'd keep saving forever

                do{
                    //print(buffer)
                    try self.newAudio.writeFromBuffer(buffer)
                }catch _{
                    print("Problem Writing Buffer")
                }
            }else{
                self.audioEngine.mainMixerNode.removeTapOnBus(0) // if we don't remove the tap, it keeps firing indefinitely

                //DO WHAT YOU WANT TO DO HERE WITH EFFECTED AUDIO

             }

        }
    }catch _{
        print("Problem")
    }

    audioPlayerNode.play()

}
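As a hypothetical follow-up (not part of the original answer), once the tap has removed itself the exported file at filteredOutputURL can be played back, shared, and so on. For example, assuming filteredOutputURL is declared as an optional NSURL?, and reusing the global player so the AVAudioPlayer is not deallocated mid-playback:

    func playEffectedAudio() {
        // Hypothetical helper: play back the file the tap wrote to filteredOutputURL.
        guard let url = filteredOutputURL else { return }
        do {
            player = try AVAudioPlayer(contentsOfURL: url)
            player?.prepareToPlay()
            player?.play()
        } catch {
            print("Could not play the effected file: \(error)")
        }
    }

A call like self.playEffectedAudio() could then go where the answer's "DO WHAT YOU WANT TO DO HERE WITH EFFECTED AUDIO" comment sits.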