When generating the notification for audio persistence in the background, it appears empty, without controls or other components, just an icon. Perhaps with the same origin, another problem is that the notification keeps playing the audio even after the app has completely finished, and cannot be deleted like other notifications.
void main() async{
WidgetsFlutterBinding.ensureInitialized();
//iniciaStream();
await Firebase.initializeApp(
options: DefaultFirebaseOptions.currentPlatform,
);
audioHandler = await AudioService.init(
builder: () => AudioPlayerTask(),
config: const AudioServiceConfig(
androidNotificationChannelId: 'meuapp.channel.audio',
androidNotificationChannelName: 'meu app',
androidNotificationOngoing: false,
androidStopForegroundOnPause: false,
notificationColor: Color.fromARGB(10,255, 209, 0),
),
);
SystemChrome.setPreferredOrientations([DeviceOrientation.portraitUp]);
runApp(
const MaterialApp(
home: Principal(),)
);
}
class AudioPlayerTask extends BaseAudioHandler with SeekHandler {
static final _item = MediaItem(
id: urlList[currentSetStream],
album: "Science Friday",
title: "titulo",
artist: "artista",
displayTitle: "Nome da rádio",//radiosList[currentSetStream],
duration: const Duration(milliseconds: 5739820),
//artUri: Image.asset(imgList[currentSetStream]),
);
final AudioPlayer _player = AudioPlayer();
AudioPlayerHandler() {
_player.playbackEventStream.map(_transformEvent).pipe(playbackState);
mediaItem.add(_item);
_player.setAudioSource(AudioSource.uri(Uri.parse(_item.id)));
}
@override
Future<void> play() async{
await _player.setUrl(urlList[currentSetStream]);
await _player.play();
playbackState.add(
playbackState.value.copyWith(
playing: true,
processingState: AudioProcessingState.ready,
),
);
PlaybackState(
controls: [MediaControl.pause, MediaControl.stop],
processingState: AudioProcessingState.ready,
playing: true,
bufferedPosition: _player.bufferedPosition,
speed: _player.speed,
);
}
@override
Future<void> setVolume(double volume) async {
await _player.setVolume(volume);
}
@override
Future<void> pause() async{
_player.pause();
}
@override
Future<void> seek(Duration position) => _player.seek(position);
@override
Future<void> stop() => _player.stop();
PlaybackState _transformEvent(PlaybackEvent event) {
return PlaybackState(
controls: [
MediaControl.rewind,
if (_player.playing) MediaControl.pause else MediaControl.play,
MediaControl.stop,
MediaControl.fastForward,
],
systemActions: const {
MediaAction.seek,
MediaAction.seekForward,
MediaAction.seekBackward,
},
androidCompactActionIndices: const [0, 1, 3],
processingState: const {
ProcessingState.idle: AudioProcessingState.idle,
ProcessingState.loading: AudioProcessingState.loading,
ProcessingState.buffering: AudioProcessingState.buffering,
ProcessingState.ready: AudioProcessingState.ready,
ProcessingState.completed: AudioProcessingState.completed,
}[_player.processingState]!,
playing: _player.playing,
updatePosition: _player.position,
bufferedPosition: _player.bufferedPosition,
queueIndex: event.currentIndex,
);
}
PlaybackState updatePlaybackState(PlaybackEvent event) {
return PlaybackState(
controls: [
MediaControl.rewind,
if (_player.playing) MediaControl.pause else MediaControl.play,
MediaControl.stop,
MediaControl.fastForward,
],
systemActions: const {
MediaAction.seek,
MediaAction.seekForward,
MediaAction.seekBackward,
},
androidCompactActionIndices: const [0, 1, 3],
processingState: const {
ProcessingState.idle: AudioProcessingState.idle,
ProcessingState.loading: AudioProcessingState.loading,
ProcessingState.buffering: AudioProcessingState.buffering,
ProcessingState.ready: AudioProcessingState.ready,
ProcessingState.completed: AudioProcessingState.completed,
}[_player.processingState]!,
playing: _player.playing,
updatePosition: _player.position,
bufferedPosition: _player.bufferedPosition,
queueIndex: event.currentIndex,
);
}
}
I checked and re-checked the code with manuals and didn't notice any differences.
You can implement this method in background service class
@override Future onTaskRemoved() => _player.stop();