I'm working on a Flutter music app that uses the just_audio package. It works fine on Android, but on iOS playback doesn't work properly, especially for audio served as partial content (HTTP 206). I can't play songs or get the correct duration on iOS.
This is the SongHandler file:
import 'dart:io';
import 'package:audio_service/audio_service.dart';
import 'package:audio_session/audio_session.dart';
import 'package:just_audio/just_audio.dart';
import 'package:http/http.dart' as http;
import '../app_export.dart';
class SongHandler extends BaseAudioHandler with QueueHandler, SeekHandler {
  final AudioPlayer audioPlayer = AudioPlayer();
  final playlist =
      ConcatenatingAudioSource(children: [], useLazyPreparation: true);

  SongHandler() {
    initializeAudioSession();
  }

  //////////////////////////////////////////////////////////////////////////////
  // Audio Session Settings
  Future<void> initializeAudioSession() async {
    final session = await AudioSession.instance;
    try {
      await session.configure(const AudioSessionConfiguration.music());
      session.interruptionEventStream.listen((event) async {
        if (event.begin) {
          switch (event.type) {
            case AudioInterruptionType.duck:
              await audioPlayer.setVolume(0.5);
              break;
            case AudioInterruptionType.pause:
            case AudioInterruptionType.unknown:
              await audioPlayer.pause();
              break;
          }
        } else {
          switch (event.type) {
            case AudioInterruptionType.duck:
              await audioPlayer.setVolume(1);
              break;
            case AudioInterruptionType.pause:
              await audioPlayer.play();
              break;
            case AudioInterruptionType.unknown:
              break;
          }
        }
      });
    } catch (e, stackTrace) {
      debugPrint('Error initializing audio session: $e\n$stackTrace');
    }
  }
  //////////////////////////////////////////////////////////////////////////////
  // Initial Audio Sources
  UriAudioSource createAudioSource(MediaItem mediaItem) {
    return AudioSource.uri(Uri.parse(mediaItem.extras!['url'] as String),
        tag: mediaItem);
  }

  List<UriAudioSource> createAudioSources(List<MediaItem> mediaItems) {
    return mediaItems
        .map(
          (item) => AudioSource.uri(Uri.parse(item.extras!['url'] as String),
              tag: item,
              headers: {}),
        )
        .toList();
  }

  // etc...
  Future<void> setNewPlaylist(List<MediaItem> mediaItems, int index) async {
    if (!Platform.isAndroid && audioPlayer.playing) {
      await audioPlayer.stop();
    }
    var getCount = queue.value.length;
    await playlist.removeRange(0, getCount);
    final audioSource = createAudioSources(mediaItems);
    await playlist.addAll(audioSource);
    final newQueue = queue.value..addAll(mediaItems);
    queue.add(newQueue);
    try {
      await audioPlayer.setAudioSource(playlist,
          initialIndex: index, initialPosition: Duration.zero);
    } catch (e) {
      print("Error loading audio source: $e");
    }
  }
I call setNewPlaylist when a song in the list is tapped, and this is the error I get:
[ERROR:flutter/runtime/dart_vm_initializer.cc(41)] Unhandled Exception: (-1) unknown error
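To get more detail than that generic message, I'm thinking of catching just_audio's own exception types around the load. This is only a sketch; loadWithDiagnostics is a made-up helper name, not part of the handler above:
import 'package:flutter/foundation.dart' show debugPrint;
import 'package:just_audio/just_audio.dart';

// Sketch only: load a source and log the real platform error instead of
// swallowing it. loadWithDiagnostics is a made-up helper name.
Future<void> loadWithDiagnostics(AudioPlayer player, AudioSource source,
    {int initialIndex = 0}) async {
  try {
    await player.setAudioSource(source,
        initialIndex: initialIndex, initialPosition: Duration.zero);
  } on PlayerException catch (e) {
    // code/message come from the underlying platform player (AVPlayer on iOS).
    debugPrint('PlayerException: code=${e.code} message=${e.message}');
  } on PlayerInterruptedException catch (e) {
    // The load was interrupted by another call such as stop() or dispose().
    debugPrint('Load interrupted: ${e.message}');
  } catch (e, st) {
    debugPrint('Unexpected error while loading audio source: $e\n$st');
  }
}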
This is my SongBloc, with listeners that update the duration and the current MediaItem:
class SongBloc extends Bloc<SongEvent, SongState> {
  final NetworkInfo _networkInfo;
  final SongHandler songHandler;

  SongBloc(this._networkInfo, this.songHandler)
      : super(const SongState().copyWith(
            songsStatus: SongsStatus.initial,
            isPlaying: [],
            mediaItems: [],
            selectedSongId: "")) {
    // Set up listeners
    _initializeListeners();
    on<SelectSong>(onSelectSong);
    on<PlaySong>(onPlaySong);
    on<PauseSong>(onPauseSong);
    on<SkipToNextSong>(onSkipToNextSong);
    on<SkipToPreviousSong>(onSkipToPreviousSong);
  }
  // Set up listeners for the player streams
  void _initializeListeners() {
    songHandler.audioPlayer.durationStream.listen((duration) {
      _handleDurationChange(duration);
    });
    songHandler.audioPlayer.currentIndexStream.listen((index) {
      _handleCurrentSongIndexChange(index);
    });
    songHandler.audioPlayer.sequenceStateStream.listen((sequenceState) {
      _handleSequenceStateChange(sequenceState);
    });
    songHandler.audioPlayer.playbackEventStream.listen((event) {
      _handlePlaybackEvent(event);
    });
  }
  Future<void> _handleDurationChange(Duration? duration) async {
    var index = songHandler.audioPlayer.currentIndex;
    final newQueue = state.mediaItems;
    if (index == null || newQueue!.isEmpty || index >= newQueue.length) {
      return;
    }
    if (songHandler.audioPlayer.shuffleModeEnabled) {
      index = songHandler.audioPlayer.shuffleIndices!.indexOf(index);
    }
    final oldMediaItem = newQueue[index];
    // duration = await songHandler.fetchAudioDuration(oldMediaItem.extras!['url'] as String);
    final newMediaItem = oldMediaItem.copyWith(duration: duration);
    newQueue[index] = newMediaItem;
    emit(state.copyWith(songDuration: duration, mediaItem: newMediaItem));
  }
  Future<void> _handleCurrentSongIndexChange(int? index) async {
    final pPlaylist = state.mediaItems;
    if (index == null || pPlaylist!.isEmpty) return;
    if (songHandler.audioPlayer.shuffleModeEnabled) {
      index = songHandler.audioPlayer.shuffleIndices!.indexOf(index);
    }
    emit(state.copyWith(
        selectedIndexSong: index,
        selectedSongId: pPlaylist[index].id,
        mediaItem: pPlaylist[index]));
    final PaletteGenerator paletteGenerator =
        await PaletteGenerator.fromImageProvider(
      NetworkImage(
          "${Config.imageBaseUrl}${state.songsListEntity![index].photoUrl}"),
      size: const Size(200, 200),
      maximumColorCount: 20,
    );
    final Color dominantColor =
        paletteGenerator.darkMutedColor?.color ?? AppColor.primaryColor;
    emit(state.copyWith(
      backgroundColor: dominantColor,
    ));
  }

  void _handleSequenceStateChange(SequenceState? sequenceState) {
    final sequence = sequenceState?.effectiveSequence;
    if (sequence == null || sequence.isEmpty) return;
    final items = sequence.map((source) => source.tag as MediaItem).toList();
    emit(state.copyWith(mediaItems: items));
  }
  void _handlePlaybackEvent(PlaybackEvent event) {
    final playing = songHandler.audioPlayer.playing;
    songHandler.playbackState.add(
      songHandler.playbackState.value.copyWith(
        controls: [
          MediaControl.skipToPrevious,
          if (playing) MediaControl.pause else MediaControl.play,
          MediaControl.stop,
          MediaControl.skipToNext,
        ],
        systemActions: const {
          MediaAction.seek,
          MediaAction.seekForward,
          MediaAction.seekBackward,
        },
        androidCompactActionIndices: const [0, 1, 3],
        processingState: const {
          ProcessingState.idle: AudioProcessingState.idle,
          ProcessingState.loading: AudioProcessingState.loading,
          ProcessingState.buffering: AudioProcessingState.buffering,
          ProcessingState.ready: AudioProcessingState.ready,
          ProcessingState.completed: AudioProcessingState.completed,
        }[songHandler.audioPlayer.processingState]!,
        repeatMode: const {
          LoopMode.off: AudioServiceRepeatMode.none,
          LoopMode.one: AudioServiceRepeatMode.one,
          LoopMode.all: AudioServiceRepeatMode.all,
        }[songHandler.audioPlayer.loopMode]!,
        shuffleMode: songHandler.audioPlayer.shuffleModeEnabled
            ? AudioServiceShuffleMode.all
            : AudioServiceShuffleMode.none,
        playing: playing,
        updatePosition: songHandler.audioPlayer.position,
        bufferedPosition: songHandler.audioPlayer.bufferedPosition,
        speed: songHandler.audioPlayer.speed,
        queueIndex: event.currentIndex ?? 0,
      ),
    );
    List<bool> isPlaying = [];
    isPlaying.addAll(state.isPlaying!);
    if (state.selectedIndexSong != null && isPlaying.isNotEmpty) {
      if (playing) {
        isPlaying[state.selectedIndexSong!] = true;
      } else {
        isPlaying[state.selectedIndexSong!] = false;
      }
      emit(state.copyWith(
        isPlaying: isPlaying,
      ));
    }
  }
  Future<void> onSelectSong(SelectSong event, Emitter<SongState> emit) async {
    try {
      final mediaItems =
          event.songs.map(MediaConverter.mediaToMediaItem).toList();
      final MediaItem mediaItem = mediaItems[event.index];
      emit(state.copyWith(
        selectedSongId: mediaItem.id,
        selectedIndexSong: event.index,
      ));
      if (MediaConverter.areMediaItemsEqual(state.mediaItems!, mediaItems)) {
        await songHandler.skipToQueueItem(event.index);
      } else {
        await songHandler.setNewPlaylist(mediaItems, event.index);
        List<bool> isPlaying = List.filled(event.songs.length, false);
        emit(state.copyWith(
          isPlaying: isPlaying,
        ));
      }
      final PaletteGenerator paletteGenerator =
          await PaletteGenerator.fromImageProvider(
        NetworkImage(
            "${Config.imageBaseUrl}${event.songs[event.index].photoUrl}"),
        size: const Size(200, 200),
        maximumColorCount: 20,
      );
      final Color dominantColor =
          paletteGenerator.darkMutedColor?.color ?? Colors.black;
      emit(state.copyWith(
        backgroundColor: dominantColor,
      ));
      songHandler.play();
      emit(state.copyWith(
        songsListEntity: event.songs,
        currentSong: event.songs[event.index],
        mediaItem: mediaItem,
        mediaItems: mediaItems,
        songsStatus: SongsStatus.selectSong,
      ));
    } catch (e) {
      emit(state.copyWith(
        songsStatus: SongsStatus.error,
        error: const PlaybackFailure('Failed to select song', ""),
      ));
    }
  }
}
I think the problem is in the response headers coming from the server, because the iOS player apparently can't read the audio chunks correctly. A response looks like this:
Accept-Ranges: bytes
Access-Control-Allow-Headers: Origin, X-Requested-With, Content-Type, Accept
Access-Control-Allow-Origin: *
Connection: keep-alive
Content-Length: 1000002
Content-Range: bytes 965921-1965921/11279288
Content-Type: audio/mpeg3
Date: Tue, 24 Sep 2024 08:05:29 GMT
Server: nginx/1.22.0
X-Powered-By: Express
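To check how the server answers an explicit Range request (AVPlayer on iOS seems to be much stricter about byte-range responses and content types than ExoPlayer on Android), I can probe it with the http package that the handler already imports. This is only a sketch, and the URL is a placeholder:
import 'package:http/http.dart' as http;

// Sketch only: ask for the first two bytes and print the range-related
// headers the server sends back. songUrl is a placeholder.
Future<void> probeRangeSupport(String songUrl) async {
  final response = await http.get(
    Uri.parse(songUrl),
    headers: {'Range': 'bytes=0-1'},
  );
  // A server with working byte-range support should reply 206 with a
  // Content-Range like "bytes 0-1/<total size>" and a standard Content-Type
  // such as audio/mpeg (audio/mpeg3 is not a registered MIME type).
  print('status: ${response.statusCode}');
  print('content-range: ${response.headers['content-range']}');
  print('content-length: ${response.headers['content-length']}');
  print('content-type: ${response.headers['content-type']}');
}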
I tried adding a "Range" header to the audio's GET request, like this:
List<UriAudioSource> createAudioSources(List<MediaItem> mediaItems) {
  return mediaItems
      .map(
        (item) => AudioSource.uri(Uri.parse(item.extras!['url'] as String),
            tag: item,
            headers: {
              "Range": "",
            }),
      )
      .toList();
}
The only way it works is when I add that header with an empty value: the audio then plays on iOS, but the duration is zero and the listeners report the wrong values. I can't seek, and the progress bar doesn't track the stream. This is the progress widget:
import 'package:audio_service/audio_service.dart';
import 'package:audio_video_progress_bar/audio_video_progress_bar.dart';
import '../../../../core/app_export.dart';
import '../bloc/song_bloc.dart';
class SongProgress extends StatelessWidget {
  final Duration? totalDuration;
  final Color? color;

  const SongProgress({
    super.key,
    this.totalDuration,
    this.color,
  });

  @override
  Widget build(BuildContext context) {
    return StreamBuilder<Duration>(
      stream: AudioService.position,
      builder: (context, positionSnapshot) {
        Duration? position = positionSnapshot.data;
        return ProgressBar(
          progress: position ?? Duration.zero,
          buffered: const Duration(seconds: 1),
          total: totalDuration ?? Duration.zero,
          onSeek: (position) {
            BlocProvider.of<SongBloc>(context).songHandler.seek(position);
          },
          progressBarColor: color,
          baseBarColor: Colors.white.withOpacity(0.24),
          bufferedBarColor: Colors.white.withOpacity(0.24),
          thumbColor: Colors.white,
          barHeight: 3.h,
          thumbRadius: 5.h,
          timeLabelTextStyle:
              Styles.textStyle14.copyWith(fontWeight: FontWeight.bold),
        );
      },
    );
  }
}
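Since totalDuration ends up as zero on iOS, one workaround I'm considering is carrying the duration from the backend into the MediaItem at conversion time, so the progress bar at least gets a real total even when the player reports none. This is only a sketch; durationInSeconds is a hypothetical field that my current MediaConverter doesn't receive:
import 'package:audio_service/audio_service.dart';

// Sketch only: build the MediaItem with a duration supplied by the API.
// durationInSeconds is a hypothetical field, not something my current
// converter has.
MediaItem mediaItemWithApiDuration({
  required String id,
  required String title,
  required String url,
  int? durationInSeconds,
}) {
  return MediaItem(
    id: id,
    title: title,
    duration: durationInSeconds == null
        ? null
        : Duration(seconds: durationInSeconds),
    extras: {'url': url},
  );
}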
What do I need to configure to get the audio player working correctly on iOS with these partial-content streams?