Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions lib/features/transcription/data/deepgram_service.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import 'dart:convert';
import 'dart:io';
import 'package:flutter_dotenv/flutter_dotenv.dart';
import 'package:http/http.dart' as http;

class DeepgramService {
  // Read once from .env; empty string when the key is not configured.
  final String _apiKey = dotenv.env['DEEPGRAM_API_KEY'] ?? '';

  /// Transcribes the audio file at [recordingPath] with Deepgram's
  /// pre-recorded transcription API (nova-2 model).
  ///
  /// Returns the transcript text, or `'No speech detected'` when Deepgram
  /// returns an empty transcript. Throws an [Exception] when the API key is
  /// missing, the file does not exist, the HTTP call fails, or the response
  /// payload has an unexpected shape.
  Future<String> transcribe(String recordingPath) async {
    if (_apiKey.isEmpty) {
      // Fail fast with a clear message instead of a confusing 401 from the API.
      throw Exception('DEEPGRAM_API_KEY is not configured');
    }

    final uri = Uri.parse('https://api.deepgram.com/v1/listen?model=nova-2');

    final file = File(recordingPath);
    if (!await file.exists()) {
      throw Exception('Recording file not found');
    }

    final bytes = await file.readAsBytes();
    final response = await http.post(
      uri,
      headers: {
        'Authorization': 'Token $_apiKey',
        // .m4a files are MP4 audio containers; 'audio/mp4' is the registered
        // MIME type ('audio/m4a' is non-standard).
        'Content-Type': 'audio/mp4',
      },
      body: bytes,
    );

    if (response.statusCode == 200) {
      // Decode the raw bytes as UTF-8 explicitly: `response.body` falls back
      // to latin1 when the server sends no charset header, which corrupts
      // any non-ASCII text in the transcript.
      final decodedResponse = json.decode(utf8.decode(response.bodyBytes));
      // Null-aware traversal: a payload with a missing/renamed key yields
      // null here instead of throwing an opaque NoSuchMethodError.
      final result = decodedResponse['results']?['channels']?[0]
          ?['alternatives']?[0]?['transcript'];
      if (result is! String) {
        throw Exception('Unexpected Deepgram response shape');
      }
      return result.isNotEmpty ? result : 'No speech detected';
    } else {
      // Include the body — Deepgram puts the actual error reason there.
      throw Exception(
          'Deepgram failed: ${response.statusCode} ${response.body}');
    }
  }
}
18 changes: 18 additions & 0 deletions lib/features/transcription/data/gemini_service.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import 'package:doc_pilot_new_app_gradel_fix/services/chatbot_service.dart';

/// Thin wrapper over [ChatbotService] that turns a raw transcription into
/// clinician-facing artifacts via Gemini prompts.
class GeminiService {
  final ChatbotService _chatbotService = ChatbotService();

  /// Asks Gemini for a summary of the conversation in [transcription].
  Future<String> generateSummary(String transcription) {
    final prompt =
        "Generate a summary of the conversation based on this transcription: $transcription";
    return _chatbotService.getGeminiResponse(prompt);
  }

  /// Asks Gemini for a prescription derived from [transcription].
  ///
  /// NOTE(review): waits 3 seconds before the request — presumably to space
  /// out consecutive Gemini calls; confirm whether this delay is required.
  Future<String> generatePrescription(String transcription) async {
    await Future.delayed(const Duration(seconds: 3));
    final prompt =
        "Generate a prescription based on the conversation in this transcription: $transcription";
    return _chatbotService.getGeminiResponse(prompt);
  }
}
23 changes: 23 additions & 0 deletions lib/features/transcription/domain/transcription_model.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
/// Immutable holder for the three artifacts of a transcription session.
class TranscriptionModel {
  /// Verbatim transcript returned by the speech-to-text backend.
  final String rawTranscript;

  /// Generated summary of the conversation.
  final String summary;

  /// Generated prescription text.
  final String prescription;

  const TranscriptionModel({
    this.rawTranscript = '',
    this.summary = '',
    this.prescription = '',
  });

  /// Returns a copy in which each non-null argument replaces the
  /// corresponding current value.
  TranscriptionModel copyWith({
    String? rawTranscript,
    String? summary,
    String? prescription,
  }) =>
      TranscriptionModel(
        rawTranscript: rawTranscript ?? this.rawTranscript,
        summary: summary ?? this.summary,
        prescription: prescription ?? this.prescription,
      );
}
158 changes: 158 additions & 0 deletions lib/features/transcription/presentation/transcription_controller.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
import 'dart:developer' as developer;
import 'dart:math';
import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:permission_handler/permission_handler.dart';
import '../data/deepgram_service.dart';
import '../data/gemini_service.dart';
import '../domain/transcription_model.dart';

/// Lifecycle of a transcription session: idle → recording → transcribing
/// (speech-to-text) → processing (Gemini post-processing) → done, or error.
enum TranscriptionState { idle, recording, transcribing, processing, done, error }

/// Drives the record → transcribe → summarize pipeline and exposes its
/// progress to the UI via [ChangeNotifier].
class TranscriptionController extends ChangeNotifier {
  final _audioRecorder = AudioRecorder();
  final _deepgramService = DeepgramService();
  final _geminiService = GeminiService();

  // Single RNG reused by the waveform animation — the original allocated a
  // fresh Random() for every bar on every 100ms tick.
  final _random = Random();

  TranscriptionState state = TranscriptionState.idle;
  TranscriptionModel data = const TranscriptionModel();
  String? errorMessage;
  String _recordingPath = '';

  // Waveform — kept here since it's driven by recording state
  final List<double> waveformValues = List.filled(40, 0.0);
  Timer? _waveformTimer;

  /// Whether audio is currently being captured.
  bool get isRecording => state == TranscriptionState.recording;

  /// Whether a recording is being transcribed or post-processed.
  bool get isProcessing =>
      state == TranscriptionState.transcribing ||
      state == TranscriptionState.processing;

  String get transcription => data.rawTranscript;
  String get summary => data.summary;
  String get prescription => data.prescription;

  /// Prompts the user for microphone access (no-op if already granted).
  Future<void> requestPermissions() async {
    await Permission.microphone.request();
  }

  /// Starts a new recording, or stops the current one and kicks off
  /// transcription.
  Future<void> toggleRecording() async {
    if (isRecording) {
      await _stopRecording();
    } else {
      await _startRecording();
    }
  }

  /// Begins recording AAC audio into a timestamped file in the temp dir.
  Future<void> _startRecording() async {
    try {
      if (await _audioRecorder.hasPermission()) {
        final directory = await getTemporaryDirectory();
        _recordingPath =
            '${directory.path}/recording_${DateTime.now().millisecondsSinceEpoch}.m4a';

        await _audioRecorder.start(
          RecordConfig(
            encoder: AudioEncoder.aacLc,
            bitRate: 128000,
            sampleRate: 44100,
          ),
          path: _recordingPath,
        );

        // Reset the previous session's results AND any stale error so the
        // UI does not keep showing an old failure during a new recording.
        data = const TranscriptionModel();
        errorMessage = null;
        state = TranscriptionState.recording;
        _startWaveformAnimation();
        notifyListeners();

        developer.log('Started recording to: $_recordingPath');
      } else {
        // Ask for the permission; the user taps record again afterwards.
        await requestPermissions();
      }
    } catch (e) {
      _setError('Error starting recording: $e');
    }
  }

  /// Stops capture and hands the file off to transcription.
  Future<void> _stopRecording() async {
    try {
      _waveformTimer?.cancel();
      _resetWaveform();

      await _audioRecorder.stop();
      state = TranscriptionState.transcribing;
      notifyListeners();

      developer.log('Recording stopped, transcribing...');
      await _transcribe();
    } catch (e) {
      _setError('Error stopping recording: $e');
    }
  }

  /// Sends the recording to Deepgram, then triggers Gemini post-processing
  /// when actual speech was detected.
  Future<void> _transcribe() async {
    try {
      final transcript = await _deepgramService.transcribe(_recordingPath);

      data = data.copyWith(rawTranscript: transcript);
      state = TranscriptionState.processing;
      notifyListeners();

      if (transcript.isNotEmpty && transcript != 'No speech detected') {
        await _processWithGemini(transcript);
      } else {
        // Nothing to summarize — finish with just the (empty) transcript.
        state = TranscriptionState.done;
        notifyListeners();
      }
    } catch (e) {
      _setError('Transcription error: $e');
    }
  }

  /// Generates the summary and prescription from [transcript].
  ///
  /// Calls run sequentially on purpose — generatePrescription delays itself,
  /// presumably to space out Gemini requests.
  Future<void> _processWithGemini(String transcript) async {
    try {
      final summary = await _geminiService.generateSummary(transcript);
      final prescription = await _geminiService.generatePrescription(transcript);

      data = data.copyWith(summary: summary, prescription: prescription);
      state = TranscriptionState.done;
      notifyListeners();

      developer.log('Gemini processing complete');
    } catch (e) {
      _setError('Gemini error: $e');
    }
  }

  /// Animates the fake waveform with random bar heights every 100ms.
  void _startWaveformAnimation() {
    _waveformTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
      for (int i = 0; i < waveformValues.length; i++) {
        waveformValues[i] = _random.nextDouble();
      }
      notifyListeners();
    });
  }

  /// Zeroes out all waveform bars (caller notifies listeners).
  void _resetWaveform() {
    for (int i = 0; i < waveformValues.length; i++) {
      waveformValues[i] = 0.0;
    }
  }

  /// Records [message] as the current error and moves to the error state.
  void _setError(String message) {
    errorMessage = message;
    state = TranscriptionState.error;
    notifyListeners();
    developer.log(message);
  }

  @override
  void dispose() {
    _waveformTimer?.cancel();
    _audioRecorder.dispose();
    super.dispose();
  }
}
Loading