录音可以在 iOS 模拟器上使用,但不能在实际设备上使用

Kil*_*ter 4 audio-recording ios flutter

我在我的 Flutter 应用程序中使用 flutter-sound 来录制(然后播放)一些音频。然而,我遇到了一个有趣的问题:在 iOS 模拟器上它可以工作,但在实际的 iOS 设备上我遇到了一个模糊的错误。两者都运行相同版本的 iOS (15.4)。

这是启动和停止录制/播放的代码,很简单:

/// Begins a new audio recording, replacing any previous take.
///
/// Flags the UI as recording, notifies [curState], makes sure the output
/// directory exists (and the previous file is gone), then starts the
/// flutter_sound recorder with a platform-appropriate codec.
Future<void> startRecording(GlobalState curState) async {
    // Flip the UI into "recording" mode before any async work starts.
    setState(() {
      recording = true;
    });
    curState.startRecording();

    // Delete the previous take, if one was recorded earlier.
    final previousFile = File(filePathText);
    if (filePathText.isNotEmpty && previousFile.existsSync()) {
      previousFile.deleteSync();
    }

    // The recorder will not create the directory itself.
    final audioDir = Directory(pathToAudio);
    if (!audioDir.existsSync()) {
      audioDir.createSync();
    }

    // iOS records PCM/WAV; other platforms record AAC in an MP4 container.
    // NOTE(review): the playback codec in startPlaying must stay in sync.
    await _recordingSession.startRecorder(
      toFile: filePathName(),
      codec: Platform.isIOS ? Codec.pcm16WAV : Codec.aacMP4,
      audioSource: AudioSource.microphone,
    );
  }

  /// Stops the active recording and propagates the resulting file path.
  ///
  /// Clears the UI recording flag, stops the recorder, and — when the
  /// recorder returns a file URL — stores it in [filePathText], wraps it in
  /// a [FileAnswer] and fires the optional onChanged callback.
  ///
  /// Fix: the original checked `onChanged != null` twice (an identical
  /// nested check); the redundant inner check is removed.
  Future<void> stopRecording(GlobalState curState) async {
    setState(() {
      recording = false;
    });
    String? fileURL = await _recordingSession.stopRecorder();
    print("the file is recorded!!!");
    print("FILEPATH:");
    print(fileURL);
    curState.stopRecording();
    if (fileURL != null) {
      filePathText = fileURL;
      // Notify the listener (if any) with the recorded file's metadata.
      if (widget.widgetControlInfo.onChanged != null) {
        FileAnswer tempA = FileAnswer.fromBasicQuestion(widget.currentQuestion);
        tempA.filePath = fileURL;
        tempA.filetype = FileType.recording;
        widget.widgetControlInfo.onChanged!(tempA);
      }
    } else {
      print('sumn went rong wit da recording');
    }
  }

  /// Builds a timestamped output path for a new recording.
  ///
  /// Fixes two defects in the original:
  /// * `DateTime.now()` was called six times, so the components could
  ///   straddle a second (or minute/hour/day) boundary mid-build; a single
  ///   snapshot is taken instead.
  /// * Components were not zero-padded, so e.g. month 1 / day 12 and
  ///   month 11 / day 2 produced the same "112" prefix and could collide.
  ///
  /// Extension matches the recording codec: WAV on iOS, M4A elsewhere.
  String filePathName() {
    final now = DateTime.now();
    // Zero-pad a component to two digits so names are unambiguous.
    String two(int v) => v.toString().padLeft(2, '0');
    final stamp = two(now.month) +
        two(now.day) +
        two(now.hour) +
        two(now.minute) +
        two(now.second);
    return pathToAudio + stamp + (Platform.isIOS ? ".wav" : ".m4a");
  }

  /// Toggles playback: stops it when audio is playing, starts it otherwise.
  Future<void> playControl() async {
    // Pick the matching action as a tear-off, then run it.
    final action = playing ? stopPlaying : startPlaying;
    await action();
  }

  /// Plays back the last recorded file.
  ///
  /// Bug fix: the original set `playing = true` *before* checking whether a
  /// file path exists, so the early return left the UI permanently stuck in
  /// the "playing" state with nothing playing. The guard now runs first and
  /// the flag is only set when playback will actually start.
  Future<void> startPlaying() async {
    // Nothing recorded yet — do not touch the playing flag.
    if (filePathText.isEmpty) {
      return;
    }
    setState(() {
      playing = true;
    });
    if (File(filePathText).existsSync()) {
      print("the file existssss!!!");
      print("FILEPATH:");
      print(filePathText);
    }
    await _playingSession.startPlayer(
        fromURI: filePathText,
        // Must mirror the codec chosen in startRecording.
        codec: (Platform.isIOS ? Codec.pcm16WAV : Codec.aacMP4),
        whenFinished: () {
          print("its over");
          stopPlaying();
        });
  }

  /// Halts any active playback and clears the UI playing flag.
  Future<void> stopPlaying() async {
    setState(() => playing = false);
    await _playingSession.stopPlayer();
  }

/// Initializes audio: output directory, recorder/player sessions, progress
/// subscriptions, and runtime permissions.
///
/// NOTE(review): this is the version that fails on physical iOS devices —
/// it opens the flutter_sound sessions but never configures an iOS
/// AudioSession (category/options), which the accepted answer identifies as
/// the missing piece. It works on the simulator only.
void _initializer() async {
    // Platform-specific storage location for recordings.
    if (Platform.isIOS) {
      var directory = await getTemporaryDirectory();
      print("TIS IOS");
      pathToAudio = directory.path + '/';
    } else {
      // NOTE(review): hard-coded external path; assumes legacy (non-scoped)
      // storage access on Android — confirm on modern Android versions.
      pathToAudio = '/sdcard/Download/m-Path/';
    }
    _recordingSession = new FlutterSoundRecorder(logLevel: Level.debug);
    _playingSession = new FlutterSoundPlayer(logLevel: Level.debug);
    // Sessions must be opened before subscriptions can be attached.
    await _recordingSession.openRecorder();
    await _playingSession.openPlayer();
    // 10 ms tick rate for the recording progress stream.
    await _recordingSession
        .setSubscriptionDuration(Duration(milliseconds: 10))
        .then((value) => null);
    // Drive the on-screen timer from the recorder's progress events.
    _recorderSubscription = _recordingSession.onProgress!.listen((e) {
      setState(() {
        _timerText = e.duration.toString().substring(0, 10);
      });
    });
    await _playingSession.setSubscriptionDuration(Duration(milliseconds: 10));
    // Drive the same timer from playback position during playback.
    _playerSubscription = _playingSession.onProgress!.listen((e) {
      setState(() {
        _timerText = e.position.toString().substring(0, 10);
      });
    });
    // Ask for permissions up front; results are not checked here.
    await Permission.microphone.request();
    await Permission.storage.request();
    await Permission.manageExternalStorage.request();
  }
Run Code Online (Sandbox Code Playgroud)

UI 部分如下所示。在iOS模拟器上,当我按下录音按钮时,计时器开始递增,再次按下后,我可以按下播放按钮来听我刚刚录制的内容。在设备上,当我按下按钮时,计时器不会增加而是保持为零,当我尝试播放音频时,出现以下错误:

[VERBOSE-2:ui_dart_state.cc(209)] Unhandled Exception: PlatformException(Audio Player, startPlayer failure, null, null)
#0      StandardMethodCodec.decodeEnvelope (package:flutter/src/services/message_codecs.dart:607:7)
#1      MethodChannel._invokeMethod (package:flutter/src/services/platform_channel.dart:177:18)
<asynchronous suspension>
#2      MethodChannelFlutterSoundPlayer.invokeMethod (package:flutter_sound_platform_interface/method_channel_flutter_sound_player.dart:157:12)
<asynchronous suspension>
#3      FlutterSoundPlayer._startPlayer (package:flutter_sound/public/flutter_sound_player.dart:819:19)
<asynchronous suspension>
#4      FlutterSoundPlayer.startPlayer.<anonymous closure> (package:flutter_sound/public/flutter_sound_player.dart:759:11)
<asynchronous suspension>
#5      BasicLock.synchronized (package:synchronized/src/basic_lock.dart:33:16)
<asynchronous suspension>
#6      FlutterSoundPlayer.startPlayer (package:flutter_sound/public/flutter_sound_player.dart:758:5)
<asynchronous suspension>
#7      _RecordingQuestionWidgetS.startPlaying (package:flutter_app2/interactions/widgets/questionWidgets/RecordingQuestionWidget.dart:216:7)
<asynchronous suspension>
#8      _RecordingQuestionWidgetS.playControl (package:flutter_app2/interactions/widgets/questionWidgets/RecordingQuestionWidget.dart:200:7)
<asynchronous suspension>
Run Code Online (Sandbox Code Playgroud)

我已经在 GitHub 上问过 flutter-sound 的作者了,但他似乎也不知道出了什么问题,而且它在模拟器上可以工作但在真实设备上不行这一事实让我认为问题可能不仅仅是一些错误的代码。

同一应用程序的不同部分已经保存并显示同一目录中保存的图像,因此我认为这不是权限问题。

Kil*_*ter 7

显然,在 iOS 上,除非您创建一个 AudioSession 实例(来自 audio_session 包)并进行配置,否则录音无法工作。新的 initializer 函数如下所示:

/// Initializes audio: output directory, recorder/player sessions, the iOS
/// AudioSession configuration, progress subscriptions, and permissions.
///
/// This is the working version: the key difference from the failing one is
/// the `AudioSession.instance` configuration block, which sets the iOS
/// AVAudioSession category to playAndRecord. Without it, recording/playback
/// fails on physical iOS devices (while still working on the simulator).
void _initializer() async {
 // Platform-specific storage location for recordings.
 if (Platform.isIOS) {
   var directory = await getApplicationDocumentsDirectory();
   pathToAudio = directory.path + '/';
 } else {
   // NOTE(review): hard-coded external path; assumes legacy (non-scoped)
   // storage access on Android — confirm on modern Android versions.
   pathToAudio = '/sdcard/Download/appname/';
 }
 _recordingSession = new FlutterSoundRecorder();
 _playingSession = new FlutterSoundPlayer();
 await _recordingSession.openRecorder();
 await _playingSession.openPlayer();
 // The fix: configure the OS-level audio session so iOS devices allow
 // simultaneous recording and playback through the speaker/Bluetooth.
 final session = await AudioSession.instance;
 await session.configure(AudioSessionConfiguration(
   avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
   avAudioSessionCategoryOptions:
       AVAudioSessionCategoryOptions.allowBluetooth |
           AVAudioSessionCategoryOptions.defaultToSpeaker,
   avAudioSessionMode: AVAudioSessionMode.spokenAudio,
   avAudioSessionRouteSharingPolicy:
       AVAudioSessionRouteSharingPolicy.defaultPolicy,
   avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
   androidAudioAttributes: const AndroidAudioAttributes(
     contentType: AndroidAudioContentType.speech,
     flags: AndroidAudioFlags.none,
     usage: AndroidAudioUsage.voiceCommunication,
   ),
   androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
   androidWillPauseWhenDucked: true,
 ));
 // 10 ms tick rate for the recording progress stream.
 await _recordingSession
     .setSubscriptionDuration(Duration(milliseconds: 10))
     .then((value) => null);
 // Drive the on-screen timer from the recorder's progress events.
 _recorderSubscription = _recordingSession.onProgress!.listen((e) {
   setState(() {
     _timerText = e.duration.toString().substring(0, 10);
   });
 });
 await _playingSession.setSubscriptionDuration(Duration(milliseconds: 
10));
 // Drive the same timer from playback position during playback.
 _playerSubscription = _playingSession.onProgress!.listen((e) {
   setState(() {
     _timerText = e.position.toString().substring(0, 10);
   });
 });
 // Ask for permissions up front; results are not checked here.
 await Permission.microphone.request();
 await Permission.storage.request();
 await Permission.manageExternalStorage.request();
}
Run Code Online (Sandbox Code Playgroud)