Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 54 additions & 9 deletions lib/main.dart
Original file line number Diff line number Diff line change
Expand Up @@ -314,14 +314,21 @@ class _TranscriptionScreenState extends State<TranscriptionScreen> with SingleTi
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
// App header
const Text(
'DocPilot',
style: TextStyle(
fontSize: 28,
fontWeight: FontWeight.bold,
color: Colors.white,
),
// App header row with model picker
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
crossAxisAlignment: CrossAxisAlignment.center,
children: [
const Text(
'DocPilot',
style: TextStyle(
fontSize: 28,
fontWeight: FontWeight.bold,
color: Colors.white,
),
),
_buildModelPicker(),
],
),
const SizedBox(height: 8),
Text(
Expand All @@ -330,7 +337,7 @@ class _TranscriptionScreenState extends State<TranscriptionScreen> with SingleTi
: _isTranscribing
? 'Transcribing your voice...'
: _isProcessing
? 'Processing with Gemini...'
? 'Processing with ${_chatbotService.selectedModel.displayName}...'
: 'Tap the mic to begin',
style: const TextStyle(
fontSize: 16,
Expand Down Expand Up @@ -502,6 +509,44 @@ class _TranscriptionScreenState extends State<TranscriptionScreen> with SingleTi
);
}

/// Builds the popup menu that lets the user switch between Gemini models.
///
/// Selecting an entry stores the choice on `_chatbotService` inside
/// `setState`, so labels elsewhere in the UI that read the selected model
/// rebuild with the new name.
Widget _buildModelPicker() {
  final current = _chatbotService.selectedModel;

  // Local helper: one two-line menu entry (name + short description).
  PopupMenuItem<GeminiModel> buildItem(GeminiModel option) {
    final isSelected = option == current;
    return PopupMenuItem<GeminiModel>(
      value: option,
      child: Column(
        mainAxisSize: MainAxisSize.min,
        crossAxisAlignment: CrossAxisAlignment.start,
        children: [
          Text(
            option.displayName,
            style: TextStyle(
              color: Colors.white,
              // Bold marks the currently active model.
              fontWeight: isSelected ? FontWeight.bold : FontWeight.normal,
            ),
          ),
          Text(
            option.description,
            style: const TextStyle(fontSize: 11, color: Colors.white60),
          ),
        ],
      ),
    );
  }

  return PopupMenuButton<GeminiModel>(
    initialValue: current,
    tooltip: 'Select AI model',
    icon: const Icon(Icons.tune, color: Colors.white70, size: 22),
    color: Colors.deepPurple.shade700,
    onSelected: (choice) => setState(() {
      _chatbotService.selectedModel = choice;
    }),
    itemBuilder: (_) =>
        [for (final option in GeminiModel.values) buildItem(option)],
  );
}

// Helper method to build navigation buttons
Widget _buildNavigationButton(
BuildContext context,
Expand Down
93 changes: 71 additions & 22 deletions lib/services/chatbot_service.dart
Original file line number Diff line number Diff line change
Expand Up @@ -3,45 +3,94 @@ import 'dart:developer' as developer;
import 'package:http/http.dart' as http;
import 'package:flutter_dotenv/flutter_dotenv.dart';

/// Supported Gemini/Gemma models via the Gemini API.
///
/// Each value carries the REST [modelId] used in the endpoint path, a
/// human-readable [displayName] for the UI, and a one-line [description]
/// shown in the model picker.
enum GeminiModel {
  /// Default choice: low latency, general purpose.
  flash(
    'gemini-2.0-flash',
    'Gemini 2.0 Flash',
    'Fast responses, good for most tasks',
  ),

  /// Slower but stronger reasoning.
  pro(
    'gemini-1.5-pro',
    'Gemini 1.5 Pro',
    'Higher accuracy for complex reasoning',
  ),

  /// Open-weight Gemma variant served through the same API.
  gemma(
    'gemma-3-27b-it',
    'Gemma 3 27B',
    'Open-weight model, efficient summarisation',
  );

  /// Model identifier as it appears in the Gemini API URL.
  final String modelId;

  /// Name shown to the user.
  final String displayName;

  /// Short blurb shown under the name in the picker.
  final String description;

  const GeminiModel(this.modelId, this.displayName, this.description);
}

/// Thin client for the Gemini `generateContent` REST endpoint.
///
/// Holds the API key (from `.env`) and the user-selected [GeminiModel].
/// All failures — missing key, HTTP errors, timeouts, malformed responses —
/// are returned as human-readable strings prefixed with `'Error:'` rather
/// than thrown, so callers can display them directly.
class ChatbotService {
  // API key loaded once from the .env file; empty string when absent.
  final String _apiKey = dotenv.env['GEMINI_API_KEY'] ?? '';

  /// Model used for the next request; mutable so the UI can switch at runtime.
  GeminiModel selectedModel;

  ChatbotService({this.selectedModel = GeminiModel.flash});

  /// Whether the configured key looks usable.
  ///
  /// Rejects empty values and common template placeholders (e.g. the
  /// literal "your_gemini_api_key_here" left in a copied `.env.example`).
  bool get hasValidApiKey {
    final trimmed = _apiKey.trim();
    if (trimmed.isEmpty) return false;
    final lower = trimmed.toLowerCase();
    return !lower.contains('your_gemini_api_key_here') &&
        !lower.contains('replace_with') &&
        !lower.contains('example') &&
        !lower.contains('dummy');
  }

  /// Sends [prompt] to [selectedModel] and returns the generated text.
  ///
  /// Returns the model's first candidate text on success, or an
  /// `'Error: ...'` string on any failure. Never throws. The prompt and
  /// response bodies are intentionally not printed to avoid leaking user
  /// content and the key-bearing URL into logs.
  Future<String> getGeminiResponse(String prompt) async {
    if (!hasValidApiKey) {
      return 'Error: GEMINI_API_KEY is missing. Add it to your .env file.';
    }

    developer.log(
      'Sending prompt to ${selectedModel.displayName}',
      name: 'ChatbotService',
    );

    // Model id is part of the endpoint path; the key rides as a query param.
    final url = Uri.parse(
      'https://generativelanguage.googleapis.com/v1beta/models/${selectedModel.modelId}:generateContent?key=$_apiKey',
    );

    try {
      final response = await http
          .post(
            url,
            headers: {'Content-Type': 'application/json'},
            body: jsonEncode({
              'contents': [
                {
                  'parts': [
                    {'text': prompt}
                  ]
                }
              ],
              'generationConfig': {
                'temperature': 0.7,
                'maxOutputTokens': 1024,
              },
            }),
          )
          // Bound the wait so a dead network surfaces as the catch below.
          .timeout(const Duration(seconds: 30));

      if (response.statusCode == 200) {
        final data = jsonDecode(response.body);
        // Defensive extraction: candidates/content/parts may be absent or
        // empty (e.g. safety-blocked responses).
        final candidates = data['candidates'];
        if (candidates is List && candidates.isNotEmpty) {
          final text = candidates[0]['content']?['parts']?[0]?['text'];
          if (text is String && text.trim().isNotEmpty) {
            developer.log(
              'Response received from ${selectedModel.displayName}',
              name: 'ChatbotService',
            );
            return text;
          }
        }
        return 'Error: Unexpected response format from ${selectedModel.displayName}.';
      } else {
        // Prefer the API's own error message when the body carries one;
        // fall back to a generic status-code message otherwise.
        String message =
            'Error: ${selectedModel.displayName} request failed (status ${response.statusCode}).';
        try {
          final apiError = jsonDecode(response.body)['error']?['message'];
          if (apiError is String && apiError.trim().isNotEmpty) {
            message = 'Error: $apiError';
          }
        } catch (_) {
          // Non-JSON error body: keep the generic message.
        }
        developer.log(
          'API error ${response.statusCode}: ${response.body}',
          name: 'ChatbotService',
        );
        return message;
      }
    } catch (e) {
      // Covers socket errors, TimeoutException, and JSON decode failures.
      developer.log('Request failed: $e', name: 'ChatbotService', error: e);
      return 'Error: Could not reach ${selectedModel.displayName}. Check your connection and try again.';
    }
  }
}