refactor: Upgrade to latest flutter_sound_lite

Christian Pauly 2021-01-23 11:17:34 +01:00
parent b6dca5b7a7
commit 2f7dece4c7
5 changed files with 117 additions and 62 deletions

View File

@@ -6,8 +6,7 @@ import 'package:fluffychat/components/message_download_content.dart';
import 'package:flushbar/flushbar_helper.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
-import 'package:flutter_sound/flutter_sound.dart';
-import 'package:intl/intl.dart';
+import 'package:flutter_sound_lite/flutter_sound.dart';
import 'package:universal_html/prefer_universal/html.dart' as html;
import '../utils/ui_fake.dart' if (dart.library.html) 'dart:ui' as ui;
import 'matrix.dart';
@@ -31,7 +30,7 @@ enum AudioPlayerStatus { NOT_DOWNLOADED, DOWNLOADING, DOWNLOADED }
class _AudioPlayerState extends State<AudioPlayer> {
AudioPlayerStatus status = AudioPlayerStatus.NOT_DOWNLOADED;
-FlutterSound flutterSound = FlutterSound();
+final FlutterSoundPlayer flutterSound = FlutterSoundPlayer();
StreamSubscription soundSubscription;
Uint8List audioFile;
@@ -58,9 +57,12 @@ class _AudioPlayerState extends State<AudioPlayer> {
@override
void dispose() {
-if (flutterSound.audioState == t_AUDIO_STATE.IS_PLAYING) {
+if (flutterSound.isPlaying) {
flutterSound.stopPlayer();
}
+if (flutterSound.isOpen()) {
+flutterSound.closeAudioSession();
+}
soundSubscription?.cancel();
super.dispose();
}
@@ -86,28 +88,31 @@ class _AudioPlayerState extends State<AudioPlayer> {
void _playAction() async {
if (AudioPlayer.currentId != widget.event.eventId) {
if (AudioPlayer.currentId != null) {
-if (flutterSound.audioState != t_AUDIO_STATE.IS_STOPPED) {
+if (!flutterSound.isStopped) {
await flutterSound.stopPlayer();
setState(() => null);
}
}
AudioPlayer.currentId = widget.event.eventId;
}
-switch (flutterSound.audioState) {
-case t_AUDIO_STATE.IS_PLAYING:
+switch (flutterSound.playerState) {
+case PlayerState.isPlaying:
await flutterSound.pausePlayer();
break;
-case t_AUDIO_STATE.IS_PAUSED:
+case PlayerState.isPaused:
await flutterSound.resumePlayer();
break;
-case t_AUDIO_STATE.IS_RECORDING:
-break;
-case t_AUDIO_STATE.IS_STOPPED:
-await flutterSound.startPlayerFromBuffer(
-audioFile,
-codec: t_CODEC.CODEC_AAC,
-);
-soundSubscription ??= flutterSound.onPlayerStateChanged.listen((e) {
+case PlayerState.isStopped:
+default:
+if (!flutterSound.isOpen()) {
+await flutterSound.openAudioSession(
+focus: AudioFocus.requestFocusAndStopOthers,
+category: SessionCategory.playback);
+}
+await flutterSound.setSubscriptionDuration(Duration(milliseconds: 100));
+await flutterSound.startPlayer(fromDataBuffer: audioFile);
+soundSubscription ??= flutterSound.onProgress.listen((e) {
if (AudioPlayer.currentId != widget.event.eventId) {
soundSubscription?.cancel()?.then((f) => soundSubscription = null);
setState(() {
@@ -116,19 +121,13 @@ class _AudioPlayerState extends State<AudioPlayer> {
});
AudioPlayer.currentId = null;
} else if (e != null) {
-var date =
-DateTime.fromMillisecondsSinceEpoch(e.currentPosition.toInt());
-var txt = DateFormat('mm:ss', 'en_US').format(date);
+var txt =
+'${e.position.inMinutes.toString().padLeft(2, '0')}:${(e.position.inSeconds % 60).toString().padLeft(2, '0')}';
setState(() {
-maxPosition = e.duration;
-currentPosition = e.currentPosition;
+maxPosition = e.duration.inMilliseconds.toDouble();
+currentPosition = e.position.inMilliseconds.toDouble();
statusText = txt;
});
-if (e.duration == e.currentPosition) {
-soundSubscription
-?.cancel()
-?.then((f) => soundSubscription = null);
-}
}
});
break;
@@ -158,9 +157,7 @@ class _AudioPlayerState extends State<AudioPlayer> {
? CircularProgressIndicator(strokeWidth: 2)
: IconButton(
icon: Icon(
-flutterSound.audioState == t_AUDIO_STATE.IS_PLAYING
-? Icons.pause
-: Icons.play_arrow,
+flutterSound.isPlaying ? Icons.pause : Icons.play_arrow,
color: widget.color,
),
onPressed: () {
@@ -175,8 +172,8 @@ class _AudioPlayerState extends State<AudioPlayer> {
Expanded(
child: Slider(
value: currentPosition,
-onChanged: (double position) =>
-flutterSound.seekToPlayer(position.toInt()),
+onChanged: (double position) => flutterSound
+.seekToPlayer(Duration(milliseconds: position.toInt())),
max: status == AudioPlayerStatus.DOWNLOADED ? maxPosition : 0,
min: 0,
),
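For reference, the playback lifecycle the reworked widget follows can be sketched as below (flutter_sound_lite 7.x API as used in the hunks above; the helper name and the onTick callback are illustrative, not part of the commit):

import 'dart:typed_data';

import 'package:flutter_sound_lite/flutter_sound.dart';

/// Sketch only: play an in-memory AAC buffer and report playback progress.
Future<FlutterSoundPlayer> playBuffer(
  Uint8List buffer,
  void Function(Duration position, Duration duration) onTick,
) async {
  final player = FlutterSoundPlayer();
  // A session has to be opened before the player is usable.
  await player.openAudioSession(
    focus: AudioFocus.requestFocusAndStopOthers,
    category: SessionCategory.playback,
  );
  // onProgress only emits once a subscription interval has been set.
  await player.setSubscriptionDuration(Duration(milliseconds: 100));
  player.onProgress.listen((e) => onTick(e.position, e.duration));
  await player.startPlayer(fromDataBuffer: buffer, codec: Codec.aacADTS);
  return player;
}

// Teardown mirrors the dispose() hunk above:
//   if (player.isPlaying) await player.stopPlayer();
//   if (player.isOpen()) await player.closeAudioSession();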

View File

@@ -1,16 +1,16 @@
import 'dart:async';
+import 'dart:io';
+import 'dart:math';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
-import 'package:flutter_sound/flutter_sound.dart';
-import 'package:intl/intl.dart';
+import 'package:flutter_sound_lite/flutter_sound.dart';
+import 'package:path_provider/path_provider.dart';
class RecordingDialog extends StatefulWidget {
-final Function onFinished;
final L10n l10n;
const RecordingDialog({
-this.onFinished,
@required this.l10n,
Key key,
}) : super(key: key);
@@ -20,22 +20,38 @@ class RecordingDialog extends StatefulWidget {
}
class _RecordingDialogState extends State<RecordingDialog> {
-FlutterSound flutterSound = FlutterSound();
+final FlutterSoundRecorder flutterSound = FlutterSoundRecorder();
String time = '00:00:00';
StreamSubscription _recorderSubscription;
bool error = false;
+String _recordedPath;
+double _decibels = 0;
void startRecording() async {
try {
-await flutterSound.startRecorder(
-codec: t_CODEC.CODEC_AAC,
-);
-_recorderSubscription = flutterSound.onRecorderStateChanged.listen((e) {
-var date =
-DateTime.fromMillisecondsSinceEpoch(e.currentPosition.toInt());
-setState(() => time = DateFormat('mm:ss:SS', 'en_US').format(date));
+await flutterSound.openAudioSession();
+await flutterSound.setSubscriptionDuration(Duration(milliseconds: 100));
+final codec = Codec.aacADTS;
+final tempDir = await getTemporaryDirectory();
+_recordedPath = '${tempDir.path}/recording${ext[codec.index]}';
+// delete any existing file
+var outputFile = File(_recordedPath);
+if (outputFile.existsSync()) {
+await outputFile.delete();
+}
+await flutterSound.startRecorder(codec: codec, toFile: _recordedPath);
+_recorderSubscription = flutterSound.onProgress.listen((e) {
+setState(() {
+_decibels = e.decibels;
+time =
+'${e.duration.inMinutes.toString().padLeft(2, '0')}:${(e.duration.inSeconds % 60).toString().padLeft(2, '0')}';
+});
});
} catch (e) {
error = true;
@@ -52,6 +68,7 @@ class _RecordingDialogState extends State<RecordingDialog> {
void dispose() {
if (flutterSound.isRecording) flutterSound.stopRecorder();
_recorderSubscription?.cancel();
+flutterSound.closeAudioSession();
super.dispose();
}
@@ -62,12 +79,24 @@ class _RecordingDialogState extends State<RecordingDialog> {
Navigator.of(context).pop();
});
}
+const maxDecibalWidth = 64.0;
+final decibalWidth = min(_decibels / 2, maxDecibalWidth).toDouble();
return AlertDialog(
content: Row(
children: <Widget>[
-CircleAvatar(
-backgroundColor: Colors.red,
-radius: 8,
+Container(
+width: maxDecibalWidth,
+height: maxDecibalWidth,
+alignment: Alignment.center,
+child: AnimatedContainer(
+duration: Duration(milliseconds: 50),
+width: decibalWidth,
+height: decibalWidth,
+decoration: BoxDecoration(
+color: Colors.red,
+borderRadius: BorderRadius.circular(decibalWidth),
+),
+),
),
SizedBox(width: 8),
Expanded(
@@ -100,11 +129,8 @@ class _RecordingDialogState extends State<RecordingDialog> {
),
onPressed: () async {
await _recorderSubscription?.cancel();
-final result = await flutterSound.stopRecorder();
-if (widget.onFinished != null) {
-widget.onFinished(result);
-}
-Navigator.of(context).pop();
+await flutterSound.stopRecorder();
+Navigator.of(context).pop<String>(_recordedPath);
},
),
],
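The recorder side uses the same session pattern; a condensed sketch of the steps introduced above, assuming the flutter_sound_lite 7.x API (the helper name is illustrative; ext is the codec-to-extension table exported by the package, as used in the diff):

import 'dart:io';

import 'package:flutter_sound_lite/flutter_sound.dart';
import 'package:path_provider/path_provider.dart';

/// Sketch only: record AAC to a temporary file, reporting elapsed time and level.
Future<String> startRecordingWithProgress(
  FlutterSoundRecorder recorder,
  void Function(Duration elapsed, double decibels) onTick,
) async {
  await recorder.openAudioSession();
  await recorder.setSubscriptionDuration(Duration(milliseconds: 100));
  const codec = Codec.aacADTS;
  final tempDir = await getTemporaryDirectory();
  final path = '${tempDir.path}/recording${ext[codec.index]}';
  final existing = File(path);
  if (existing.existsSync()) await existing.delete(); // overwrite an older take
  recorder.onProgress.listen((e) => onTick(e.duration, e.decibels ?? 0));
  await recorder.startRecorder(codec: codec, toFile: path);
  return path;
}

// The dialog above stops with flutterSound.stopRecorder() and hands the file
// path back through Navigator.of(context).pop<String>(_recordedPath).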

View File

@@ -31,6 +31,7 @@ import 'package:flutter/services.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:image_picker/image_picker.dart';
import 'package:pedantic/pedantic.dart';
+import 'package:permission_handler/permission_handler.dart';
import 'package:scroll_to_index/scroll_to_index.dart';
import 'package:swipe_to_action/swipe_to_action.dart';
import 'package:shared_preferences/shared_preferences.dart';
@@ -260,13 +261,16 @@ class _ChatState extends State<Chat> {
}
void voiceMessageAction(BuildContext context) async {
-String result;
-await showDialog(
+if (await Permission.microphone.isGranted != true) {
+final status = await Permission.microphone.request();
+if (status != PermissionStatus.granted) return;
+}
+final result = await showDialog<String>(
context: context,
builder: (c) => RecordingDialog(
-onFinished: (r) => result = r,
l10n: L10n.of(context),
-));
+),
+);
if (result == null) return;
final audioFile = File(result);
// as we already explicitly say send in the recording dialog,

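The permission check added to voiceMessageAction can be read as a small guard built on permission_handler (the same API the commit imports; the helper name below is illustrative):

import 'package:permission_handler/permission_handler.dart';

/// Sketch only: make sure microphone access is granted before recording.
Future<bool> ensureMicrophonePermission() async {
  if (await Permission.microphone.isGranted) return true;
  final status = await Permission.microphone.request();
  return status == PermissionStatus.granted;
}

// Usage, mirroring the new flow:
//   if (!await ensureMicrophonePermission()) return;
//   final result = await showDialog<String>(...); // RecordingDialog now pops the file path
//   if (result == null) return;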
View File

@@ -426,13 +426,34 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "0.5.7"
-flutter_sound:
+flutter_sound_lite:
dependency: "direct main"
description:
-name: flutter_sound
+name: flutter_sound_lite
url: "https://pub.dartlang.org"
source: hosted
-version: "2.1.1"
+version: "7.5.3+1"
+flutter_sound_platform_interface:
+dependency: transitive
+description:
+name: flutter_sound_platform_interface
+url: "https://pub.dartlang.org"
+source: hosted
+version: "7.5.3+1"
+flutter_sound_web:
+dependency: transitive
+description:
+name: flutter_sound_web
+url: "https://pub.dartlang.org"
+source: hosted
+version: "7.5.3+1"
+flutter_spinkit:
+dependency: transitive
+description:
+name: flutter_spinkit
+url: "https://pub.dartlang.org"
+source: hosted
+version: "4.1.2+1"
flutter_svg:
dependency: "direct main"
description:
@@ -879,6 +900,13 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.0"
+recase:
+dependency: transitive
+description:
+name: recase
+url: "https://pub.dartlang.org"
+source: hosted
+version: "3.0.1"
receive_sharing_intent:
dependency: "direct main"
description:

View File

@@ -37,7 +37,7 @@ dependencies:
universal_html: ^1.2.4
receive_sharing_intent: ^1.4.2
flutter_slidable: ^0.5.7
-flutter_sound: 2.1.1
+flutter_sound_lite: ^7.5.3+1
open_file: ^3.0.3
mime_type: ^0.3.2
flushbar: ^1.10.4