I am trying to integrate the audio_service package with the video_player package to implement background audio in my app. On Android everything works fine, but on iOS the video pauses as soon as the app goes to the background (on the simulator it simply pauses and can be resumed; on a physical device it pauses and the process is then killed by the system almost immediately). How can I prevent this pause so that my audio keeps playing in the background, as it does on Android?
I could not find any obvious errors in my code, and there are few to no examples of audio_service integration with video_player on the internet, so I tried to reproduce the behavior by taking the audio_service with just_audio example (which works on both Android and iOS) and modifying it to use the video_player package instead. This gave me a minimal example that shows the behavior described above. Please let me know if you see any errors, have an idea what the issue might be, or know a better way to integrate these two packages.
All you need to reproduce the problem is this main.dart file:
// ignore_for_file: public_member_api_docs
// FOR MORE EXAMPLES, VISIT THE GITHUB REPOSITORY AT:
//
// https://github.com/ryanheise/audio_service
//
// This example implements a minimal audio handler that renders the current
// media item and playback state to the system notification and responds to 4
// media actions:
//
// - play
// - pause
// - seek
// - stop
//
// To run this example, use:
//
// flutter run
import 'dart:async';
import 'dart:math';
import 'package:audio_service/audio_service.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:rxdart/rxdart.dart';
import 'package:video_player/video_player.dart';
// You might want to provide this using dependency injection rather than a
// global variable.
late AudioHandler _audioHandler;
Future<void> main() async {
_audioHandler = await AudioService.init(
builder: () => AudioPlayerHandler(),
config: const AudioServiceConfig(
androidNotificationChannelId: 'com.ryanheise.myapp.channel.audio',
androidNotificationChannelName: 'Audio playback',
androidNotificationOngoing: true,
),
);
runApp(const MyApp());
}
class MyApp extends StatelessWidget {
const MyApp({super.key});
@override
Widget build(BuildContext context) {
return MaterialApp(
title: 'Audio Service Demo',
theme: ThemeData(primarySwatch: Colors.blue),
home: const MainScreen(),
);
}
}
class SeekBar extends StatefulWidget {
final Duration duration;
final Duration position;
final Duration bufferedPosition;
final ValueChanged<Duration>? onChanged;
final ValueChanged<Duration>? onChangeEnd;
const SeekBar({
super.key,
required this.duration,
required this.position,
this.bufferedPosition = Duration.zero,
this.onChanged,
this.onChangeEnd,
});
@override
SeekBarState createState() => SeekBarState();
}
class SeekBarState extends State<SeekBar> {
double? _dragValue;
bool _dragging = false;
late SliderThemeData _sliderThemeData;
@override
void didChangeDependencies() {
super.didChangeDependencies();
_sliderThemeData = SliderTheme.of(context).copyWith(
trackHeight: 2.0,
);
}
@override
Widget build(BuildContext context) {
final value = min(
_dragValue ?? widget.position.inMilliseconds.toDouble(),
widget.duration.inMilliseconds.toDouble(),
);
if (_dragValue != null && !_dragging) {
_dragValue = null;
}
return Stack(
children: [
SliderTheme(
data: _sliderThemeData.copyWith(
thumbShape: HiddenThumbComponentShape(),
activeTrackColor: Colors.blue.shade100,
inactiveTrackColor: Colors.grey.shade300,
),
child: ExcludeSemantics(
child: Slider(
min: 0.0,
max: widget.duration.inMilliseconds.toDouble(),
value: min(widget.bufferedPosition.inMilliseconds.toDouble(), widget.duration.inMilliseconds.toDouble()),
onChanged: (value) {},
),
),
),
SliderTheme(
data: _sliderThemeData.copyWith(
inactiveTrackColor: Colors.transparent,
),
child: Slider(
min: 0.0,
max: widget.duration.inMilliseconds.toDouble(),
value: value,
onChanged: (value) {
if (!_dragging) {
_dragging = true;
}
setState(() {
_dragValue = value;
});
if (widget.onChanged != null) {
widget.onChanged!(Duration(milliseconds: value.round()));
}
},
onChangeEnd: (value) {
if (widget.onChangeEnd != null) {
widget.onChangeEnd!(Duration(milliseconds: value.round()));
}
_dragging = false;
},
),
),
Positioned(
right: 16.0,
bottom: 0.0,
child: Text(
RegExp(r'((^0*[1-9]\d*:)?\d{2}:\d{2})\.\d+$').firstMatch("$_remaining")?.group(1) ?? '$_remaining',
style: Theme.of(context).textTheme.bodySmall),
),
],
);
}
Duration get _remaining => widget.duration - widget.position;
}
class HiddenThumbComponentShape extends SliderComponentShape {
@override
Size getPreferredSize(bool isEnabled, bool isDiscrete) => Size.zero;
@override
void paint(
PaintingContext context,
Offset center, {
required Animation<double> activationAnimation,
required Animation<double> enableAnimation,
required bool isDiscrete,
required TextPainter labelPainter,
required RenderBox parentBox,
required SliderThemeData sliderTheme,
required TextDirection textDirection,
required double value,
required double textScaleFactor,
required Size sizeWithOverflow,
}) {}
}
class MainScreen extends StatelessWidget {
const MainScreen({super.key});
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: const Text('Audio Service Demo'),
),
body: Center(
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
children: [
// Show media item title
StreamBuilder<MediaItem?>(
stream: _audioHandler.mediaItem,
builder: (context, snapshot) {
final mediaItem = snapshot.data;
return Text(mediaItem?.title ?? '');
},
),
// Play/pause/stop buttons.
StreamBuilder<bool>(
stream: _audioHandler.playbackState.map((state) => state.playing).distinct(),
builder: (context, snapshot) {
final playing = snapshot.data ?? false;
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
_button(Icons.fast_rewind, _audioHandler.rewind),
if (playing)
_button(Icons.pause, _audioHandler.pause)
else
_button(Icons.play_arrow, _audioHandler.play),
_button(Icons.stop, _audioHandler.stop),
_button(Icons.fast_forward, _audioHandler.fastForward),
],
);
},
),
// A seek bar.
StreamBuilder<MediaState>(
stream: _mediaStateStream,
builder: (context, snapshot) {
final mediaState = snapshot.data;
return SeekBar(
duration: mediaState?.mediaItem?.duration ?? Duration.zero,
position: mediaState?.position ?? Duration.zero,
onChangeEnd: (newPosition) {
_audioHandler.seek(newPosition);
},
);
},
),
// Display the processing state.
StreamBuilder<AudioProcessingState>(
stream: _audioHandler.playbackState.map((state) => state.processingState).distinct(),
builder: (context, snapshot) {
final processingState = snapshot.data ?? AudioProcessingState.idle;
return Text(
// ignore: deprecated_member_use
"Processing state: ${describeEnum(processingState)}");
},
),
],
),
),
);
}
/// A stream reporting the combined state of the current media item and its
/// current position.
Stream<MediaState> get _mediaStateStream => Rx.combineLatest2<MediaItem?, Duration, MediaState>(
_audioHandler.mediaItem, AudioService.position, (mediaItem, position) => MediaState(mediaItem, position));
IconButton _button(IconData iconData, VoidCallback onPressed) => IconButton(
icon: Icon(iconData),
iconSize: 64.0,
onPressed: onPressed,
);
}
class MediaState {
final MediaItem? mediaItem;
final Duration position;
MediaState(this.mediaItem, this.position);
}
/// An [AudioHandler] for playing a single item.
class AudioPlayerHandler extends BaseAudioHandler with SeekHandler {
static final _item = MediaItem(
// id: 'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3',
id: 'https://flutter.github.io/assets-for-api-docs/assets/videos/butterfly.mp4',
album: "Science Friday",
title: "A Salute To Head-Scratching Science",
artist: "Science Friday and WNYC Studios",
duration: const Duration(milliseconds: 7000),
// duration: const Duration(milliseconds: 5739820),
artUri: Uri.parse('https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
);
// final _player = AudioPlayer();
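// The video_player counterpart of the just_audio player above. The
// VideoPlayerOptions flags are the documented way to request that playback
// continues when the app goes to the background (allowBackgroundPlayback)
// and that the audio is mixed with other audio sources (mixWithOthers).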
final _controller = VideoPlayerController.networkUrl(
Uri.parse('https://flutter.github.io/assets-for-api-docs/assets/videos/butterfly.mp4'),
videoPlayerOptions: VideoPlayerOptions(allowBackgroundPlayback: true, mixWithOthers: true));
/// Initialise our audio handler.
AudioPlayerHandler() {
// So that our clients (the Flutter UI and the system notification) know
// what state to display, here we set up our audio handler to broadcast all
// playback state changes as they happen via playbackState...
// _player.playbackEventStream.map(_transformEvent).pipe(playbackState);
_controller.addListener(_updatePlaybackState);
// ... and also the current media item via mediaItem.
mediaItem.add(_item);
// Load the player.
// _player.setAudioSource(AudioSource.uri(Uri.parse(_item.id)));
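// initialize() is asynchronous; like the setAudioSource() call above it is
// not awaited here, mirroring the original example.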
_controller.initialize();
}
// In this simple example, we handle only 4 actions: play, pause, seek and
// stop. Any button press from the Flutter UI, notification, lock screen or
// headset will be routed through to these 4 methods so that you can handle
// your audio playback logic in one place.
@override
Future<void> play() => _controller.play();
// Future<void> play() => _player.play();
@override
Future<void> pause() => _controller.pause();
// Future<void> pause() => _player.pause();
@override
Future<void> seek(Duration position) => _controller.seekTo(position);
// Future<void> seek(Duration position) => _player.seek(position);
@override
Future<void> stop() async {
await _controller.pause();
await _controller.seekTo(Duration.zero);
}
// @override
// Future<void> stop() => _player.stop();
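// Maps the video_player state flags onto audio_service processing states.
// This is the video_player analogue of the processingState mapping in the
// original _transformEvent method (kept commented out below).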
AudioProcessingState resolveAudioProcessingState(VideoPlayerValue value) {
if (value.isBuffering) {
return AudioProcessingState.buffering;
}
if (value.isCompleted) {
return AudioProcessingState.completed;
}
return AudioProcessingState.ready;
}
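// Called from the VideoPlayerController listener registered in the
// constructor; rebroadcasts the controller's current state to all
// audio_service clients (Flutter UI, system notification, lock screen).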
void _updatePlaybackState() {
final isPlaying = _controller.value.isPlaying;
final bufferedRanges = _controller.value.buffered;
final bufferedPosition = bufferedRanges.isNotEmpty ? bufferedRanges.last.end : Duration.zero;
playbackState.add(PlaybackState(
processingState: resolveAudioProcessingState(_controller.value),
playing: isPlaying,
updatePosition: _controller.value.position,
bufferedPosition: bufferedPosition,
speed: _controller.value.playbackSpeed,
controls: [
MediaControl.rewind,
if (isPlaying) MediaControl.pause else MediaControl.play,
MediaControl.stop,
MediaControl.fastForward,
],
systemActions: const {
MediaAction.seek,
MediaAction.seekForward,
MediaAction.seekBackward,
},
updateTime: DateTime.now(),
));
}
/// Transform a just_audio event into an audio_service state.
///
/// This method is used from the constructor. Every event received from the
/// just_audio player will be transformed into an audio_service state so that
/// it can be broadcast to audio_service clients.
// PlaybackState _transformEvent(PlaybackEvent event) {
// return PlaybackState(
// controls: [
// MediaControl.rewind,
// if (_player.playing) MediaControl.pause else MediaControl.play,
// MediaControl.stop,
// MediaControl.fastForward,
// ],
// systemActions: const {
// MediaAction.seek,
// MediaAction.seekForward,
// MediaAction.seekBackward,
// },
// androidCompactActionIndices: const [0, 1, 3],
// processingState: const {
// ProcessingState.idle: AudioProcessingState.idle,
// ProcessingState.loading: AudioProcessingState.loading,
// ProcessingState.buffering: AudioProcessingState.buffering,
// ProcessingState.ready: AudioProcessingState.ready,
// ProcessingState.completed: AudioProcessingState.completed,
// }[_player.processingState]!,
// playing: _player.playing,
// updatePosition: _player.position,
// bufferedPosition: _player.bufferedPosition,
// speed: _player.speed,
// queueIndex: event.currentIndex,
// );
// }
}
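For reference, the iOS project already declares the audio background mode in ios/Runner/Info.plist, as described in the audio_service setup docs (without it the unmodified just_audio example would not keep playing in the background either):
<key>UIBackgroundModes</key>
<array>
    <string>audio</string>
</array>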
Let me know if you need any additional information.