urgent help for sound wave recorder to forward and backward functionality #71
Description
Working in flutter need solution for recording sound with wave form and forward it by 15 seconds ,
I tried many solutions but they were not helpful like using duration custom or custom.....
also used this package audio_waveforms but didnt worked if anyone could help please respond asap
import 'dart:io';
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:flutter/material.dart';
import 'package:path_provider/path_provider.dart';
import 'package:provider/provider.dart';
import 'package:smilestone/Audio_Screen/AudioRecordingProvider.dart';
import 'package:smilestone/Utils/sizeConfig.dart';
import '../Utils/utils.dart';
class AudioRecording extends StatefulWidget {
const AudioRecording({Key key}) : super(key: key);
@OverRide
State createState() => _AudioRecordingState();
}
class _AudioRecordingState extends State {
RecorderController recorderController;
PlayerController playerController;
bool _isPlayingAudio = false;
bool _isPlayIcon = true;
bool closeIcon = false;
bool yesDialog = false;
String audioFilePath;
TextEditingController saveController=TextEditingController();
bool listOfSound=false;
Duration currentPosition = Duration.zero;
Duration _position = Duration.zero;
Duration _duration = Duration.zero;
var filePath;
void toggleBtn() {
if (_isPlayingAudio == false) {
_isPlayingAudio = !_isPlayingAudio;
_startRecording();
}
if (_isPlayIcon == false) {
_isPlayIcon = !_isPlayIcon;
}
setState(() {});
}
void toggleIconBtn() {
_isPlayIcon = !_isPlayIcon;
_pauseAudio();
if(_isPlayIcon==true){
_pauseAudio();
}
print("object111111111");
setState(() {});
}
void _initializeController() {
recorderController = RecorderController()
..androidEncoder = AndroidEncoder.aac
..androidOutputFormat = AndroidOutputFormat.mpeg4
..iosEncoder = IosEncoder.kAudioFormatMPEG4AAC
..sampleRate = 16000;
playerController = PlayerController();
}
void _startRecording() async {
try {
if (!recorderController.isRecording) {
await recorderController.record();
} else {
final path = await recorderController.stop();
_isPlayingAudio = true;
if (path != null) {
await playerController.preparePlayer(path: path);
await playerController.startPlayer(finishMode: FinishMode.loop);
}
setState(() {
audioFilePath = path;
});
}
setState(() {});
}catch (e){
print(e.toString());
}
}
void forwardAudioBy15Seconds() async {
if (_isPlayingAudio) {
var duration = playerController.getDuration(DurationType.current);
print("duration===>"+duration.toString());
playerController.onPlayerStateChanged.skip(15000);
// final Duration newPosition = currentPosition + Duration(seconds: 15);
// playerController.seekTo(newPosition.inSeconds);
// print("object"+newPosition.toString());
}
}
Future saveAudio() async {
final appDir = await getExternalStorageDirectory();
final folderPath = '${appDir.parent.path}/audio_folder';
final folder = Directory(folderPath);
if (!(await folder.exists())) {
await folder.create(recursive: true);
}
var audioFileName = "";
if (saveController.text != null && saveController.text.isNotEmpty) {
audioFileName = '${saveController.text}.wav';
} else {
audioFileName = 'audio${DateTime.now().millisecondsSinceEpoch}.wav';
}
print("audioFileName"+audioFileName);
filePath = '$folderPath/$audioFileName';
print("file path"+filePath);
final recordedAudioFile = File(audioFilePath);
final copiedFile = await recordedAudioFile.copy(filePath);
if (copiedFile != null) {
print('Audio saved at: $filePath');
} else {
print('Failed to save audio.');
}
saveController.text="";
}
void _pauseAudio() async {
if (_isPlayIcon) {
playerController.pausePlayer();
_isPlayingAudio=true;
} else {
playerController.startPlayer(finishMode: FinishMode.loop);
}
_isPlayIcon = !_isPlayIcon;
setState(() {});
}
@OverRide
void initState() {
super.initState();
_initializeController();
}
@OverRide
void dispose() {
super.dispose();
recorderController.dispose();
playerController.dispose();
}
Widget SelectListOfAudio(){
return Container(
width: SizeConfig.screenWidth *0.11,
height: SizeConfig.screenHeight *0.18,
decoration: BoxDecoration(
// border: Border.all(width: 0.5),
color: Color.fromRGBO(0, 0, 0, 0.7),
borderRadius: BorderRadius.circular(36),
shape: BoxShape.rectangle,
),
child: Column(
children: [
SizedBox(height: SizeConfig.screenHeight *0.01,),
GestureDetector(
child: Image.asset(
"assets/images/audioImage.png",
height: SizeConfig.screenHeight * 0.045,
),
),
SizedBox(height: SizeConfig.screenHeight *0.01,),
GestureDetector(
child: Image.asset(
"assets/images/childImage.png",
height: SizeConfig.screenHeight * 0.045,
),
),
SizedBox(height: SizeConfig.screenHeight *0.01,),
GestureDetector(
child: Image.asset(
"assets/images/child_animalImg.png",
height: SizeConfig.screenHeight * 0.045,
width: SizeConfig.screenWidth *0.088,
),
)
],
),
);
}
@OverRide
Widget build(BuildContext context) {
SizeConfig().init(context);
return Scaffold(
body: Container(
child: SingleChildScrollView(
child: Column(
children: [
Container(
margin: EdgeInsets.only(top: SizeConfig.screenHeight * 0.055),
alignment: Alignment.topCenter,
child: Image.asset(
"assets/images/smileStone.png",
width: SizeConfig.screenWidth * 0.4,
)),
Container(
margin: EdgeInsets.only(top: SizeConfig.screenHeight*0.01,left: SizeConfig.screenWidth *0.055,right: SizeConfig.screenWidth *0.055),
width: SizeConfig.screenWidth * 0.9,
height: SizeConfig.screenHeight * 0.7,
decoration: BoxDecoration(
boxShadow: [
BoxShadow(
color: Color.fromRGBO(0, 0, 0, 0.18),
offset: Offset(0, 2),
spreadRadius: 0,
blurRadius: 8
)
],
color: ColorCode_white_text,
shape: BoxShape.rectangle,
borderRadius: BorderRadius.circular(36)
),
child: Stack(
children: [
_isPlayingAudio
? AudioFileWaveforms(
size: Size(MediaQuery.of(context).size.width, 400.0),
playerController: playerController,
enableSeekGesture: false,
waveformType: WaveformType.long,
backgroundColor: Colors.red,
playerWaveStyle: const PlayerWaveStyle(
fixedWaveColor: Colors.grey,
liveWaveColor: Colors.black,
spacing: 12,
waveThickness: 7,
waveCap: StrokeCap.round,
scaleFactor: 1000,
showSeekLine: false,
),
)
: recorderController.isRecording
? Container(
width: SizeConfig.screenWidth * 0.9,
height: SizeConfig.screenHeight * 0.7,
decoration: BoxDecoration(
boxShadow: [
BoxShadow(
color: Color.fromRGBO(0, 0, 0, 0.18),
offset: Offset(0, 2),
spreadRadius: 0,
blurRadius: 8
)
],
color: ColorCode_white_text,
shape: BoxShape.rectangle,
borderRadius: BorderRadius.circular(36)
),
child: AudioWaveforms(
size: Size(MediaQuery.of(context).size.width, 500.0),
recorderController: recorderController,
enableGesture: false,
waveStyle: const WaveStyle(
bottomPadding: 205,
waveColor: Colors.black,
showDurationLabel: false,
waveCap: StrokeCap.round,
extendWaveform: true,
showMiddleLine: false,
scaleFactor: 400,
waveThickness: 6,
spacing: 12,
),
),
)
: Stack(
alignment: Alignment.center,
children: [
Row(
// mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Container(
alignment: Alignment.topLeft,
margin: EdgeInsets.only(top: SizeConfig.screenHeight * 0.04, left: SizeConfig.screenWidth * 0.05),
child: GestureDetector(
child: Image.asset(
"assets/images/backBtn.png",
height: SizeConfig.screenHeight * 0.03,
),
),
),
GestureDetector(
onTap: (){
listOfSound=!listOfSound;
setState(() {
});
},
child: Container(
alignment: Alignment.topLeft,
margin: EdgeInsets.only(top: SizeConfig.screenHeight * 0.025, left: SizeConfig.screenWidth * 0.5),
child: !listOfSound? Image.asset(
"assets/images/selectSound.png",
height: SizeConfig.screenHeight * 0.05,
):SelectListOfAudio()
),
),
Container(
alignment: Alignment.topLeft,
padding: EdgeInsets.only(top: SizeConfig.screenHeight * 0.02, right: SizeConfig.screenWidth * 0.02),
child: GestureDetector(
child: Image.asset(
"assets/images/upload.png",
height: SizeConfig.screenHeight * 0.06,
),
),
),
],
),
Text('Record your voice here'),
],
),
_isPlayingAudio?
Container(
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Container(
alignment: Alignment.topLeft,
margin: EdgeInsets.only(top: SizeConfig.screenHeight * 0.025, left: SizeConfig.screenWidth * 0.05,bottom: SizeConfig.screenHeight *0.025),
child: GestureDetector(
onTap: () {
showDialog(
barrierDismissible: false,
context: context,
builder: (BuildContext context) {
return AlertDialog(
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(15),
),
title: Column(
mainAxisAlignment: MainAxisAlignment.start,
crossAxisAlignment: CrossAxisAlignment.start,
mainAxisSize: MainAxisSize.max,
children: [
Text(
'''Are you sure you want to Delete this recording?''',
style: TextStyle(fontSize: Utils().GlobalScreenHeight * 0.022, color: black_color, fontWeight: FontWeight.w600),
),
SizedBox(
height: SizeConfig.screenHeight * 0.03,
),
Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Container(
width: SizeConfig.screenWidth * 0.25,
height: SizeConfig.screenHeight * 0.053,
child: ElevatedButton(
style: ElevatedButton.styleFrom(
primary: greenColor, // Background color
),
onPressed: () {
Navigator.pop(context);
_isPlayingAudio = !_isPlayingAudio;
setState(() {});
},
child: Text('Yes')),
),
SizedBox(
width: SizeConfig.screenWidth * 0.05,
),
Container(
width: SizeConfig.screenWidth * 0.25,
height: SizeConfig.screenHeight * 0.053,
child: ElevatedButton(
style: ElevatedButton.styleFrom(
primary: redColor, // Background color
),
onPressed: () {
Navigator.of(context).pop();
if (_isPlayIcon == true) {
_pauseAudio();
}
},
child: Text('No')),
),
],
)
],
),
);
},
);
},
child: Image.asset(
"assets/images/wrong.png",
height: SizeConfig.screenHeight * 0.025,
),
),
),
ChangeNotifierProvider<RecordingProvider>(
create: (_) => RecordingProvider(),
child: Consumer<RecordingProvider>(
builder: (context, provider,_){
return Container(
alignment: Alignment.topLeft,
margin: EdgeInsets.only(top: SizeConfig.screenHeight * 0.025, right: SizeConfig.screenWidth * 0.05),
child: GestureDetector(
onTap: () async{
await _pauseAudio();
showDialog(
barrierDismissible: false,
context: context,
builder: (BuildContext context) {
return AlertDialog(
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(15),
),
title: Column(
mainAxisAlignment: MainAxisAlignment.start,
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisSize: MainAxisSize.max,
children: [
Container(
width:SizeConfig.screenWidth* 0.5,
height: SizeConfig.screenHeight *0.07,
child: TextFormField(
cursorColor: ColorCode_grey,
controller: saveController,
style: TextStyle(fontSize: SizeConfig.screenHeight *0.018),
decoration: InputDecoration(
border: OutlineInputBorder(
borderSide: BorderSide(
color: TextFormColor,
)
),
hintText: "Name your recording",
hintStyle: TextStyle(fontSize: SizeConfig.screenHeight *0.018),
fillColor: TextFormColor,
filled: true,
focusedBorder: OutlineInputBorder(
borderSide: BorderSide(
color: TextFormColor,
)
),
enabledBorder: OutlineInputBorder(
borderSide: BorderSide(
color: TextFormColor,
)
),
),
),
),
SizedBox(height: SizeConfig.screenHeight *0.02,),
Text("Successfully Saved!",style: TextStyle(fontWeight: FontWeight.w600,fontSize: Utils().GlobalScreenHeight *0.02),),
SizedBox(height: SizeConfig.screenHeight *0.02,),
Container(
width: SizeConfig.screenWidth * 0.25,
height: SizeConfig.screenHeight * 0.053,
child: ElevatedButton(
style: ElevatedButton.styleFrom(
primary: greenColor, // Background color
),
onPressed: () {
_saveAudio();
provider.FetchSaveRecording('${saveController.text}', filePath);
Navigator.pop(context);
// _isPlayIcon = !_isPlayIcon;
setState(() {});
},
child: Text('Done')),
),
],
),
);
});
setState(() {
});
},
child: Image.asset(
"assets/images/right.png",
height: SizeConfig.screenHeight * 0.025,
),
),
);
},
),
),
],
),
):Container(),
],
)
),
SizedBox(height: SizeConfig.screenHeight *0.05,),
_isPlayingAudio
? Container(
width: SizeConfig.screenWidth *0.8,
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
GestureDetector(
onTap: () async{
await playerController.seekTo(1000);
},
child: Image.asset(
"assets/images/backward.png",
height: SizeConfig.screenHeight * 0.08,
)),
GestureDetector(
onTap: _pauseAudio,
child: Image.asset(
_isPlayIcon ? "assets/images/audio_pause.png" : "assets/images/audio_play.png",
height: SizeConfig.screenHeight * 0.085,
)
),
GestureDetector(
onTap: ()async {
playerController.onCurrentDurationChanged.listen((duration) {
duration=5000;
});
forwardAudioBy15Seconds();
setState(()async {
Duration forwardDuration = Duration(seconds: 5);
await playerController.getDuration(DurationType.current);
forwardDuration.inSeconds;
// Seek forward by a specific duration
// Duration currentPosition = playerController.getDuration(DurationType.current);
// Duration forwardDuration = Duration(seconds: 5); // Seek forward by 5 seconds
// Duration targetPosition = currentPosition + forwardDuration;
//
// if (targetPosition > playerController.) {
// // Adjust target position to the maximum duration if it exceeds the duration
// targetPosition = playerController.duration;
// }
//
// await playerController.seekTo(targetPosition);
var duration = playerController.getDuration(DurationType.current);
print("duration===>"+duration.toString());
});
},
child: Image.asset(
"assets/images/forward.png",
height: SizeConfig.screenHeight * 0.08,
)),
],
),
)
: GestureDetector(
onTap: () {
_startRecording();
},
child: Image.asset(
recorderController.isRecording
?
"assets/images/audio_pause.png":
"assets/images/mic.png",
height: SizeConfig.screenHeight * 0.085,
)),
],
),
),
),
);
}
}