I'm using flutter-sound to record (and then play back) some audio in my flutter app. However, I've run up against an interesting problem: On the iOS emulator it works, but on the actual iOS device I get an obscure error. Both are running on the same version of iOS (15.4).

Here is the code for starting and stopping the recording/playback, it's simple enough:

Future<void> startRecording(GlobalState curState) async {
    setState(() {
      recording = true;
    });
    curState.startRecording();
    Directory directory = Directory(pathToAudio);
    if (filePathText.isNotEmpty && File(filePathText).existsSync()) {
      File(filePathText).deleteSync();
    }
    if (!directory.existsSync()) {
      directory.createSync();
    }
    await _recordingSession.startRecorder(
      toFile: (filePathName()),
      codec: (Platform.isIOS ? Codec.pcm16WAV : Codec.aacMP4),
      audioSource: AudioSource.microphone,
    );
  }

  Future<void> stopRecording(GlobalState curState) async {
    setState(() {
      recording = false;
    });
    String? fileURL = await _recordingSession.stopRecorder();
    print("the file is recorded!!!");
    print("FILEPATH:");
    print(fileURL);
    curState.stopRecording();
    if (fileURL != null) {
      filePathText = fileURL;
      if (widget.widgetControlInfo.onChanged != null) {
        FileAnswer tempA = FileAnswer.fromBasicQuestion(widget.currentQuestion);
        tempA.filePath = fileURL;
        tempA.filetype = FileType.recording;
        if (widget.widgetControlInfo.onChanged != null) {
          widget.widgetControlInfo.onChanged!(tempA);
        }
      }
    } else {
      print('sumn went rong wit da recording');
    }
  }

  String filePathName() =>
      pathToAudio +
      DateTime.now().month.toString() +
      DateTime.now().day.toString() +
      DateTime.now().hour.toString() +
      DateTime.now().minute.toString() +
      DateTime.now().second.toString() +
      (Platform.isIOS ? ".wav" : ".m4a");

  Future<void> playControl() async {
    if (playing) {
      await stopPlaying();
    } else {
      await startPlaying();
    }
  }

  Future<void> startPlaying() async {
    setState(() {
      playing = true;
    });
    if (filePathText.isEmpty) {
      return;
    } else {
      if (File(filePathText).existsSync()) {
        print("the file existssss!!!");
        print("FILEPATH:");
        print(filePathText);
      }
      await _playingSession.startPlayer(
          fromURI: filePathText,
          codec: (Platform.isIOS ? Codec.pcm16WAV : Codec.aacMP4),
          whenFinished: () {
            print("its over");
            stopPlaying();
          });
    }
    return;
  }

  Future<void> stopPlaying() async {
    setState(() {
      playing = false;
    });
    await _playingSession.stopPlayer();
  }

  void _initializer() async {
    if (Platform.isIOS) {
      var directory = await getTemporaryDirectory();
      print("TIS IOS");
      pathToAudio = directory.path + '/';
    } else {
      pathToAudio = '/sdcard/Download/m-Path/';
    }
    _recordingSession = new FlutterSoundRecorder(logLevel: Level.debug);
    _playingSession = new FlutterSoundPlayer(logLevel: Level.debug);
    await _recordingSession.openRecorder();
    await _playingSession.openPlayer();
    await _recordingSession
        .setSubscriptionDuration(Duration(milliseconds: 10))
        .then((value) => null);
    _recorderSubscription = _recordingSession.onProgress!.listen((e) {
      setState(() {
        _timerText = e.duration.toString().substring(0, 10);
      });
    });
    await _playingSession.setSubscriptionDuration(Duration(milliseconds: 10));
    _playerSubscription = _playingSession.onProgress!.listen((e) {
      setState(() {
        _timerText = e.position.toString().substring(0, 10);
      });
    });
    await Permission.microphone.request();
    await Permission.storage.request();
    await Permission.manageExternalStorage.request();
  }

Here's what the UI portion looks like. On the iOS emulator, when I press the recording button, the timer starts incrementing, and after I press it again, I can press the play button to listen what I just recorded. On the device, when I press the button, the timer doesn't increment but stays at zero, and when I try to play the audio, I get the following error:

[VERBOSE-2:ui_dart_state.cc(209)] Unhandled Exception: PlatformException(Audio Player, startPlayer failure, null, null)
#0      StandardMethodCodec.decodeEnvelope (package:flutter/src/services/message_codecs.dart:607:7)
#1      MethodChannel._invokeMethod (package:flutter/src/services/platform_channel.dart:177:18)
<asynchronous suspension>
#2      MethodChannelFlutterSoundPlayer.invokeMethod (package:flutter_sound_platform_interface/method_channel_flutter_sound_player.dart:157:12)
<asynchronous suspension>
#3      FlutterSoundPlayer._startPlayer (package:flutter_sound/public/flutter_sound_player.dart:819:19)
<asynchronous suspension>
#4      FlutterSoundPlayer.startPlayer.<anonymous closure> (package:flutter_sound/public/flutter_sound_player.dart:759:11)
<asynchronous suspension>
#5      BasicLock.synchronized (package:synchronized/src/basic_lock.dart:33:16)
<asynchronous suspension>
#6      FlutterSoundPlayer.startPlayer (package:flutter_sound/public/flutter_sound_player.dart:758:5)
<asynchronous suspension>
#7      _RecordingQuestionWidgetS.startPlaying (package:flutter_app2/interactions/widgets/questionWidgets/RecordingQuestionWidget.dart:216:7)
<asynchronous suspension>
#8      _RecordingQuestionWidgetS.playControl (package:flutter_app2/interactions/widgets/questionWidgets/RecordingQuestionWidget.dart:200:7)
<asynchronous suspension>

I asked the flutter-sound author on github already, but it seems he doesn't really know what's wrong either, and the fact that it works on the simulator but not on the device makes me think the problem might be larger than just some faulty code.

A different part of the same app already saves and shows saved images from the same directory so I don't think it's a permission issue.


Solution 1: KiloBitter

Apparently, on iOS, this doesn't work unless you create an AudioSession instance (from this package). The new initializer function looks like this:

void _initializer() async {
 if (Platform.isIOS) {
   var directory = await getApplicationDocumentsDirectory();
   pathToAudio = directory.path + '/';
 } else {
   pathToAudio = '/sdcard/Download/appname/';
 }
 _recordingSession = new FlutterSoundRecorder();
 _playingSession = new FlutterSoundPlayer();
 await _recordingSession.openRecorder();
 await _playingSession.openPlayer();
 final session = await AudioSession.instance;
 await session.configure(AudioSessionConfiguration(
   avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
   avAudioSessionCategoryOptions:
       AVAudioSessionCategoryOptions.allowBluetooth |
           AVAudioSessionCategoryOptions.defaultToSpeaker,
   avAudioSessionMode: AVAudioSessionMode.spokenAudio,
   avAudioSessionRouteSharingPolicy:
       AVAudioSessionRouteSharingPolicy.defaultPolicy,
   avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
   androidAudioAttributes: const AndroidAudioAttributes(
     contentType: AndroidAudioContentType.speech,
     flags: AndroidAudioFlags.none,
     usage: AndroidAudioUsage.voiceCommunication,
   ),
   androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
   androidWillPauseWhenDucked: true,
 ));
 await _recordingSession
     .setSubscriptionDuration(Duration(milliseconds: 10))
     .then((value) => null);
 _recorderSubscription = _recordingSession.onProgress!.listen((e) {
   setState(() {
     _timerText = e.duration.toString().substring(0, 10);
   });
 });
 await _playingSession.setSubscriptionDuration(Duration(milliseconds: 
10));
 _playerSubscription = _playingSession.onProgress!.listen((e) {
   setState(() {
     _timerText = e.position.toString().substring(0, 10);
   });
 });
 await Permission.microphone.request();
 await Permission.storage.request();
 await Permission.manageExternalStorage.request();
}