// environment_monitoring_app/lib/services/river_investigative_sampling_service.dart
//
// 775 lines
// 37 KiB
// Dart

// lib/services/river_investigative_sampling_service.dart
import 'dart:async';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:image_picker/image_picker.dart';
import 'package:path_provider/path_provider.dart';
import 'package:path/path.dart' as p;
import 'package:image/image.dart' as img;
import 'package:geolocator/geolocator.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:flutter_bluetooth_serial/flutter_bluetooth_serial.dart';
import 'package:usb_serial/usb_serial.dart';
import 'dart:convert';
import 'package:intl/intl.dart';
import 'package:connectivity_plus/connectivity_plus.dart';
import 'package:provider/provider.dart'; // Keep provider import if needed internally, though less common in services
import '../auth_provider.dart';
import 'location_service.dart';
import '../models/river_inves_manual_sampling_data.dart'; // Use Investigative model
import '../bluetooth/bluetooth_manager.dart';
import '../serial/serial_manager.dart';
import 'api_service.dart'; // Keep ApiService import for DatabaseHelper access within service if needed, or remove if unused directly
import 'package:environment_monitoring_app/services/database_helper.dart';
import 'local_storage_service.dart';
import 'server_config_service.dart';
import 'zipping_service.dart';
import 'submission_api_service.dart';
import 'submission_ftp_service.dart';
import 'telegram_service.dart';
import 'retry_service.dart';
import 'base_api_service.dart'; // Import for SessionExpiredException
/// Service layer for the River Investigative sampling module.
///
/// Orchestrates photo capture/watermarking, sonde connectivity over
/// Bluetooth and USB serial (via the shared managers), and submission of
/// sampling data: direct online (API + FTP) or offline queuing with local
/// logging and retry.
class RiverInvestigativeSamplingService { // Renamed class
  // Shared service/manager instances used throughout this module.
  final LocationService _locationService = LocationService();
  final BluetoothManager _bluetoothManager = BluetoothManager();
  final SerialManager _serialManager = SerialManager();
  final SubmissionApiService _submissionApiService = SubmissionApiService();
  final SubmissionFtpService _submissionFtpService = SubmissionFtpService();
  final DatabaseHelper _dbHelper = DatabaseHelper();
  final LocalStorageService _localStorageService = LocalStorageService();
  final ServerConfigService _serverConfigService = ServerConfigService();
  final ZippingService _zippingService = ZippingService();
  final RetryService _retryService = RetryService();
  // Injected so all modules share a single Telegram alert pipeline.
  final TelegramService _telegramService;
  final ImagePicker _picker = ImagePicker();
  // Platform channel used to ask native code for USB permission; the name
  // must match the channel registered on the Android/iOS side.
  static const platform = MethodChannel('com.example.environment_monitoring_app/usb');
  RiverInvestigativeSamplingService(this._telegramService); // Constructor remains similar
  /// Returns the device's current GPS position (delegates to [LocationService]).
  Future<Position> getCurrentLocation() => _locationService.getCurrentLocation();
  /// Returns the distance between two coordinates (delegates to [LocationService]).
  double calculateDistance(double lat1, double lon1, double lat2, double lon2) => _locationService.calculateDistance(lat1, lon1, lat2, lon2);
// Adapted image processing for Investigative data
Future<File?> pickAndProcessImage(ImageSource source, { required RiverInvesManualSamplingData data, required String imageInfo, bool isRequired = false, String? stationCode}) async { // Updated model type
try {
final XFile? pickedFile = await _picker.pickImage(
source: source,
imageQuality: 85, // Keep quality settings
maxWidth: 1024, // Keep resolution settings
);
if (pickedFile == null) {
return null;
}
final bytes = await pickedFile.readAsBytes();
img.Image? originalImage = img.decodeImage(bytes);
if (originalImage == null) {
return null;
}
// ✅ FIX: Apply landscape check to ALL photos, not just required ones.
if (originalImage.height > originalImage.width) {
debugPrint("Image rejected: Must be in landscape orientation.");
return null;
}
// Watermark using investigative data
final String watermarkTimestamp = "${data.samplingDate} ${data.samplingTime}";
final font = img.arial24; // Use consistent font
final textWidth = watermarkTimestamp.length * 12; // Approximate width
// Draw background rectangle for text visibility
img.fillRect(originalImage, x1: 5, y1: 5, x2: textWidth + 15, y2: 35, color: img.ColorRgb8(255, 255, 255));
// Draw timestamp string
img.drawString(originalImage, watermarkTimestamp, font: font, x: 10, y: 10, color: img.ColorRgb8(0, 0, 0));
final tempDir = await getTemporaryDirectory();
// Use the determined station code passed in (handles Manual/Triennial/New)
final finalStationCode = stationCode ?? 'NA';
final fileTimestamp = "${data.samplingDate}-${data.samplingTime}".replaceAll(':', '-');
// Consistent filename format
final newFileName = "${finalStationCode}_${fileTimestamp}_${imageInfo.replaceAll(' ', '')}.jpg";
final filePath = p.join(tempDir.path, newFileName);
// Encode and write the processed image
return File(filePath)..writeAsBytesSync(img.encodeJpg(originalImage));
} catch (e) {
debugPrint('Error in pickAndProcessImage (River Investigative): $e');
return null;
}
}
  // Bluetooth and Serial Management - No changes needed, uses shared managers
  /// Live Bluetooth connection state (from the shared [BluetoothManager]).
  ValueNotifier<BluetoothConnectionState> get bluetoothConnectionState => _bluetoothManager.connectionState;
  /// Live USB serial connection state (from the shared [SerialManager]).
  ValueNotifier<SerialConnectionState> get serialConnectionState => _serialManager.connectionState;
  /// Sonde ID of the active transport: Bluetooth wins whenever it is not
  /// fully disconnected; otherwise the serial manager's ID is used.
  ValueNotifier<String?> get sondeId {
    if (_bluetoothManager.connectionState.value != BluetoothConnectionState.disconnected) {
      return _bluetoothManager.sondeId;
    }
    return _serialManager.sondeId;
  }
  /// Parsed sensor readings streamed from the Bluetooth sonde.
  Stream<Map<String, double>> get bluetoothDataStream => _bluetoothManager.dataStream;
  /// Parsed sensor readings streamed from the USB serial sonde.
  Stream<Map<String, double>> get serialDataStream => _serialManager.dataStream;
  /// Name of the connected Bluetooth device, or null when none.
  String? get connectedBluetoothDeviceName => _bluetoothManager.connectedDeviceName.value;
  /// Name of the connected USB serial device, or null when none.
  String? get connectedSerialDeviceName => _serialManager.connectedDeviceName.value;
Future<bool> requestDevicePermissions() async {
// Permission logic remains the same
Map<Permission, PermissionStatus> statuses = await [
Permission.bluetoothScan,
Permission.bluetoothConnect,
Permission.locationWhenInUse, // Keep location permission for GPS
].request();
if (statuses[Permission.bluetoothScan] == PermissionStatus.granted &&
statuses[Permission.bluetoothConnect] == PermissionStatus.granted &&
statuses[Permission.locationWhenInUse] == PermissionStatus.granted) { // Ensure location is granted too
return true;
} else {
debugPrint("Bluetooth Scan: ${statuses[Permission.bluetoothScan]}, Bluetooth Connect: ${statuses[Permission.bluetoothConnect]}, Location: ${statuses[Permission.locationWhenInUse]}");
return false;
}
}
  /// Lists Bluetooth devices already paired with this phone.
  Future<List<BluetoothDevice>> getPairedBluetoothDevices() => _bluetoothManager.getPairedDevices();
  /// Opens a connection to the given paired Bluetooth [device].
  Future<void> connectToBluetoothDevice(BluetoothDevice device) => _bluetoothManager.connect(device);
  /// Closes the current Bluetooth connection, if any.
  void disconnectFromBluetooth() => _bluetoothManager.disconnect();
  /// Starts periodic sonde polling over Bluetooth (default every 2 seconds).
  void startBluetoothAutoReading({Duration? interval}) => _bluetoothManager.startAutoReading(interval: interval ?? const Duration(seconds: 2));
  /// Stops periodic Bluetooth polling.
  void stopBluetoothAutoReading() => _bluetoothManager.stopAutoReading();
  /// Lists USB serial devices currently attached to the phone.
  Future<List<UsbDevice>> getAvailableSerialDevices() => _serialManager.getAvailableDevices();
Future<bool> requestUsbPermission(UsbDevice device) async {
// USB permission logic remains the same
try {
// Ensure the platform channel name matches what's defined in your native code (Android/iOS)
return await platform.invokeMethod('requestUsbPermission', {'vid': device.vid, 'pid': device.pid}) ?? false;
} on PlatformException catch (e) {
debugPrint("Failed to request USB permission: '${e.message}'.");
return false;
}
}
Future<void> connectToSerialDevice(UsbDevice device) async {
// Serial connection logic remains the same
final bool permissionGranted = await requestUsbPermission(device);
if (permissionGranted) {
await _serialManager.connect(device);
} else {
throw Exception("USB permission was not granted.");
}
}
  /// Closes the current USB serial connection, if any.
  void disconnectFromSerial() => _serialManager.disconnect();
  /// Starts periodic sonde polling over USB serial (default every 2 seconds).
  void startSerialAutoReading({Duration? interval}) => _serialManager.startAutoReading(interval: interval ?? const Duration(seconds: 2));
  /// Stops periodic serial polling.
  void stopSerialAutoReading() => _serialManager.stopAutoReading();
  /// Releases both transport managers; call when the owning screen is disposed.
  void dispose() {
    _bluetoothManager.dispose();
    _serialManager.dispose();
  }
  // Adapted Submission Logic for Investigative
  /// Entry point for submitting a completed River Investigative sample.
  ///
  /// Decides between direct online submission and offline queuing based on
  /// connectivity and whether the current auth session is an offline one.
  /// [logDirectory], when non-null, identifies an existing local log folder
  /// so the log is updated rather than duplicated.
  ///
  /// Returns a map containing at least 'status', 'success' and 'message'
  /// (see [_performOnlineSubmission] / [_performOfflineQueuing]).
  Future<Map<String, dynamic>> submitData({
    required RiverInvesManualSamplingData data, // Updated model type
    required List<Map<String, dynamic>>? appSettings,
    required AuthProvider authProvider,
    String? logDirectory,
  }) async {
    // *** MODIFIED: Module name changed ***
    const String moduleName = 'river_investigative';
    final connectivityResult = await Connectivity().checkConnectivity();
    bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
    // Offline sessions carry a synthetic token prefixed "offline-session-".
    bool isOfflineSession = authProvider.isLoggedIn && (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false);
    // Auto-relogin logic remains the same: if we regained connectivity while
    // holding an offline session, try to upgrade to a real online session.
    if (isOnline && isOfflineSession) {
      debugPrint("River Investigative submission online during offline session. Attempting auto-relogin..."); // Log context update
      try {
        final bool transitionSuccess = await authProvider.checkAndTransitionToOnlineSession();
        if (transitionSuccess) {
          isOfflineSession = false; // Successfully transitioned to online
        } else {
          isOnline = false; // Auto-relogin failed, treat as offline
        }
      } on SessionExpiredException catch (_) {
        debugPrint("Session expired during auto-relogin check. Treating as offline.");
        isOnline = false;
      }
    }
    // Branch based on connectivity and session status
    if (isOnline && !isOfflineSession) {
      debugPrint("Proceeding with direct ONLINE River Investigative submission..."); // Log context update
      return await _performOnlineSubmission(
        data: data,
        appSettings: appSettings,
        moduleName: moduleName,
        authProvider: authProvider,
        logDirectory: logDirectory,
      );
    } else {
      debugPrint("Proceeding with OFFLINE River Investigative queuing mechanism..."); // Log context update
      return await _performOfflineQueuing(
        data: data,
        moduleName: moduleName,
        logDirectory: logDirectory, // Pass for potential update
      );
    }
  }
  /// Performs a direct online submission: form data + images to the API,
  /// then ZIPped data/images to FTP, finally logging locally and alerting.
  ///
  /// Status codes written to the log ('S4'/'S3'/'L4'/'L1') encode which of
  /// API and FTP succeeded (see step 4 below). On session expiry, both API
  /// calls and FTP uploads are queued through [RetryService] instead.
  /// Returns {'status', 'success', 'message', 'reportId'} where 'reportId'
  /// may be null if the data POST failed.
  Future<Map<String, dynamic>> _performOnlineSubmission({
    required RiverInvesManualSamplingData data, // Updated model type
    required List<Map<String, dynamic>>? appSettings,
    required String moduleName, // Passed in as 'river_investigative'
    required AuthProvider authProvider,
    String? logDirectory,
  }) async {
    final serverName = (await _serverConfigService.getActiveApiConfig())?['config_name'] as String? ?? 'Default';
    // Get image files using the Investigative model's method
    final imageFilesWithNulls = data.toApiImageFiles();
    imageFilesWithNulls.removeWhere((key, value) => value == null); // Remove nulls
    final Map<String, File> finalImageFiles = imageFilesWithNulls.cast<String, File>();
    bool anyApiSuccess = false;
    Map<String, dynamic> apiDataResult = {};
    Map<String, dynamic> apiImageResult = {};
    String finalMessage = '';
    String finalStatus = '';
    bool isSessionKnownToBeExpired = false;
    try {
      // 1. Submit Form Data (using Investigative endpoint and data)
      apiDataResult = await _submissionApiService.submitPost(
        moduleName: moduleName, // 'river_investigative'
        // *** MODIFIED: API Endpoint ***
        endpoint: 'river/investigative/sample', // Assumed endpoint for investigative data
        body: data.toApiFormData(), // Use Investigative model's method
      );
      if (apiDataResult['success'] == true) {
        anyApiSuccess = true;
        // *** MODIFIED: Extract report ID using assumed key ***
        data.reportId = apiDataResult['data']?['r_inv_id']?.toString(); // Assumed key for investigative ID
        if (data.reportId != null) {
          if (finalImageFiles.isNotEmpty) {
            // 2. Submit Images (using Investigative endpoint)
            apiImageResult = await _submissionApiService.submitMultipart(
              moduleName: moduleName, // 'river_investigative'
              // *** MODIFIED: API Endpoint ***
              endpoint: 'river/investigative/images', // Assumed endpoint for investigative images
              // *** MODIFIED: Field key for ID ***
              fields: {'r_inv_id': data.reportId!}, // Use assumed investigative ID key
              files: finalImageFiles,
            );
            if (apiImageResult['success'] != true) {
              // If image upload fails after data success, mark API part as failed overall for simplicity, or handle partially.
              anyApiSuccess = false; // Treat as overall API failure if images fail
            }
          }
          // If no images, data submission success is enough
        } else {
          // API succeeded but didn't return an ID - treat as failure
          anyApiSuccess = false;
          apiDataResult['success'] = false; // Mark as failed
          apiDataResult['message'] = 'API Error: Submission succeeded but did not return a record ID.';
        }
      }
      // If apiDataResult['success'] is false initially, SubmissionApiService queued it.
    } on SessionExpiredException catch (_) {
      debugPrint("Online River Investigative submission failed due to session expiry that could not be refreshed."); // Log context update
      isSessionKnownToBeExpired = true;
      anyApiSuccess = false;
      apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'};
      // Manually queue API calls
      // *** MODIFIED: Use Investigative endpoints for queueing ***
      await _retryService.addApiToQueue(endpoint: 'river/investigative/sample', method: 'POST', body: data.toApiFormData());
      if (finalImageFiles.isNotEmpty && data.reportId != null) {
        // Queue images only if we might have gotten an ID before expiry
        await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {'r_inv_id': data.reportId!}, files: finalImageFiles);
      } else if (finalImageFiles.isNotEmpty && data.reportId == null) {
        // If data call failed before getting ID, queue images without ID - might need manual linking later or separate retry logic
        debugPrint("Queueing investigative images without report ID due to session expiry during data submission.");
        // How to handle this depends on backend capabilities or manual intervention needs.
        // Option: Queue a complex task instead? For now, queueing individually.
        await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {}, files: finalImageFiles); // Queue images without ID
      }
    }
    // 3. Submit FTP Files (Logic remains similar, uses specific JSON methods)
    Map<String, dynamic> ftpResults = {'statuses': []};
    bool anyFtpSuccess = false;
    if (isSessionKnownToBeExpired) {
      debugPrint("Skipping FTP attempt for River Investigative due to known expired session. Manually queuing FTP tasks."); // Log context update
      final baseFileNameForQueue = _generateBaseFileName(data); // Use helper
      // --- START FIX: Add ftpConfigId when queuing --- (Copied from In-Situ, ensure DB structure matches)
      final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
      final dataZip = await _zippingService.createDataZip(
        jsonDataMap: { // Use specific JSON structures for River Investigative FTP
          // NOTE(review): here 'db.json' receives data.toDbJson() raw, but the
          // non-queued path in _generateAndUploadFtpFiles wraps the same call
          // in jsonEncode(...). One of the two is likely inconsistent —
          // confirm what ZippingService.createDataZip expects for map values.
          'db.json': data.toDbJson(), // Use Investigative model's method
          'river_inves_basic_form.json': data.toBasicFormJson(), // Use Investigative model's method
          'river_inves_reading.json': data.toReadingJson(), // Use Investigative model's method
          'river_inves_manual_info.json': data.toManualInfoJson(), // Use Investigative model's method
        },
        baseFileName: baseFileNameForQueue,
        destinationDir: null, // Save to temp dir
      );
      if (dataZip != null) {
        // Queue for each config separately
        for (final config in ftpConfigs) {
          final configId = config['ftp_config_id'];
          if (configId != null) {
            await _retryService.addFtpToQueue(
              localFilePath: dataZip.path,
              remotePath: '/${p.basename(dataZip.path)}', // Standard remote path
              ftpConfigId: configId // Provide the specific config ID
            );
          }
        }
      }
      if (finalImageFiles.isNotEmpty) {
        final imageZip = await _zippingService.createImageZip(
          imageFiles: finalImageFiles.values.toList(),
          baseFileName: baseFileNameForQueue,
          destinationDir: null, // Save to temp dir
        );
        if (imageZip != null) {
          // Queue for each config separately
          for (final config in ftpConfigs) {
            final configId = config['ftp_config_id'];
            if (configId != null) {
              await _retryService.addFtpToQueue(
                localFilePath: imageZip.path,
                remotePath: '/${p.basename(imageZip.path)}', // Standard remote path
                ftpConfigId: configId // Provide the specific config ID
              );
            }
          }
        }
      }
      // --- END FIX ---
      ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]};
      anyFtpSuccess = false; // Mark FTP as unsuccessful for overall status determination
    } else {
      // Proceed with FTP attempt if session is okay
      try {
        ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); // Call helper
        // Determine success based on statuses (excluding 'Not Configured').
        // Note: vacuously true when the statuses list is empty.
        anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured');
      } catch (e) {
        debugPrint("Unexpected River Investigative FTP submission error: $e"); // Log context update
        anyFtpSuccess = false; // Mark FTP as failed on error
        ftpResults = {'statuses': [{'status': 'Error', 'message': 'FTP process failed: $e.', 'success': false}]}; // Provide error status
      }
    }
    // 4. Determine Final Status (Logic remains the same)
    final bool overallSuccess = anyApiSuccess || anyFtpSuccess;
    if (anyApiSuccess && anyFtpSuccess) {
      finalMessage = 'Data submitted successfully to all destinations.';
      finalStatus = 'S4'; // API OK, FTP OK
    } else if (anyApiSuccess && !anyFtpSuccess) {
      finalMessage = 'Data sent to API, but some FTP uploads failed or were queued.';
      finalStatus = 'S3'; // API OK, FTP Failed/Queued
    } else if (!anyApiSuccess && anyFtpSuccess) {
      finalMessage = 'API submission failed and was queued, but files were sent to FTP successfully.';
      finalStatus = 'L4'; // API Failed/Queued, FTP OK
    } else { // Neither API nor FTP fully succeeded without queueing/errors
      finalMessage = apiDataResult['message'] ?? 'All submission attempts failed and have been queued for retry.';
      finalStatus = 'L1'; // API Failed/Queued, FTP Failed/Queued
    }
    // 5. Log Locally (using Investigative log method)
    await _logAndSave(
      data: data,
      status: finalStatus,
      message: finalMessage,
      apiResults: [apiDataResult, apiImageResult].where((r) => r.isNotEmpty).toList(), // Filter out empty results
      ftpStatuses: ftpResults['statuses'] ?? [],
      serverName: serverName,
      logDirectory: logDirectory,
    );
    // 6. Send Alert (using Investigative alert method)
    if (overallSuccess) { // Send alert only if at least one part (API or FTP) succeeded without errors/queueing immediately
      _handleSuccessAlert(data, appSettings, isDataOnly: finalImageFiles.isEmpty, isSessionExpired: isSessionKnownToBeExpired);
    }
    // Return consistent result format
    return {
      'status': finalStatus,
      'success': overallSuccess, // Reflects if *any* part succeeded now
      'message': finalMessage,
      'reportId': data.reportId // May be null if API failed
    };
  }
  /// Handles queuing the submission data when the device is offline for Investigative.
  ///
  /// Saves (or updates) the local log first, then queues a
  /// 'river_investigative_submission' task for [RetryService]. On success the
  /// data object is left with submissionStatus 'L1' and the central DB log is
  /// updated. Returns {'status': 'Queued'|'Error', 'success', 'message',
  /// 'reportId': null}.
  Future<Map<String, dynamic>> _performOfflineQueuing({
    required RiverInvesManualSamplingData data, // Updated model type
    required String moduleName, // Passed in as 'river_investigative'
    String? logDirectory, // Added for potential update
  }) async {
    final serverConfig = await _serverConfigService.getActiveApiConfig();
    final serverName = serverConfig?['config_name'] as String? ?? 'Default';
    data.submissionStatus = 'Queued'; // Tentative status, will be L1 after saving
    data.submissionMessage = 'Submission queued for later retry.';
    String? savedLogPath = logDirectory; // Use existing path if provided for an update
    // Save/Update local log first using the specific Investigative save method
    if (savedLogPath != null && savedLogPath.isNotEmpty) {
      // *** MODIFIED: Use correct update method ***
      await _localStorageService.updateRiverInvestigativeLog(data.toMap()..['logDirectory'] = savedLogPath); // Add path for update method
      debugPrint("Updated existing River Investigative log for queuing: $savedLogPath"); // Log context update
    } else {
      // *** MODIFIED: Use correct save method ***
      savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data, serverName: serverName);
      debugPrint("Saved new River Investigative log for queuing: $savedLogPath"); // Log context update
    }
    if (savedLogPath == null) {
      // If saving the log itself failed
      const message = "Failed to save River Investigative submission to local device storage."; // Log context update
      // Log failure to central DB log if possible
      await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: logDirectory);
      return {'status': 'Error', 'success': false, 'message': message};
    }
    // Queue the task for the RetryService
    // *** MODIFIED: Use specific task type ***
    await _retryService.queueTask(
      type: 'river_investigative_submission', // Specific type for retry handler
      payload: {
        'module': moduleName, // 'river_investigative'
        'localLogPath': p.join(savedLogPath, 'data.json'), // Point to the json file within the saved directory
        'serverConfig': serverConfig, // Pass current server config at time of queueing
      },
    );
    const successMessage = "Device offline. River Investigative submission has been saved locally and queued for automatic retry when connection is restored."; // Log context update
    // Update final status in the data object and potentially update log again, or just log to central DB
    data.submissionStatus = 'L1'; // Final queued status
    data.submissionMessage = successMessage;
    // Log final queued state to central DB log
    await _logAndSave(data: data, status: 'L1', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: savedLogPath); // Ensure log reflects final state
    return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': null};
  }
/// Helper to generate the base filename for ZIP files (Investigative).
String _generateBaseFileName(RiverInvesManualSamplingData data) { // Updated model type
// Use the determined station code helper
final stationCode = data.getDeterminedStationCode() ?? 'UNKNOWN';
final fileTimestamp = "${data.samplingDate}_${data.samplingTime}".replaceAll(':', '-').replaceAll(' ', '_');
return "${stationCode}_$fileTimestamp"; // Consistent format
}
  /// Generates data and image ZIP files and uploads them using SubmissionFtpService (Investigative).
  ///
  /// ZIPs are also saved inside the submission's local log folder (keyed by
  /// report ID when available, else the generated base filename). Returns a
  /// map with combined 'statuses' from the data and image uploads.
  Future<Map<String, dynamic>> _generateAndUploadFtpFiles(RiverInvesManualSamplingData data, Map<String, File> imageFiles, String serverName, String moduleName) async { // Updated model type
    final baseFileName = _generateBaseFileName(data); // Use helper
    // *** MODIFIED: Use correct base dir getter ***
    final Directory? logDirectory = await _localStorageService.getRiverInvestigativeBaseDir(serverName: serverName); // NEW GETTER
    // Determine the specific folder for this submission log within the base directory
    final folderName = data.reportId ?? baseFileName; // Use report ID if available, else generated name
    final Directory? localSubmissionDir = logDirectory != null ? Directory(p.join(logDirectory.path, folderName)) : null;
    if (localSubmissionDir != null && !await localSubmissionDir.exists()) {
      await localSubmissionDir.create(recursive: true); // Create if doesn't exist
    }
    // Create and upload data ZIP (with multiple JSON files specific to River Investigative)
    final dataZip = await _zippingService.createDataZip(
      jsonDataMap: {
        // *** MODIFIED: Use Investigative model's JSON methods and filenames ***
        // NOTE(review): 'db.json' is jsonEncode(...)d here, but the queued
        // path in _performOnlineSubmission passes data.toDbJson() raw, and the
        // other three entries are never encoded at either call site — confirm
        // what value type ZippingService.createDataZip expects.
        'db.json': jsonEncode(data.toDbJson()), // Main data structure
        'river_inves_basic_form.json': data.toBasicFormJson(),
        'river_inves_reading.json': data.toReadingJson(),
        'river_inves_manual_info.json': data.toManualInfoJson(),
      },
      baseFileName: baseFileName,
      destinationDir: localSubmissionDir, // Save ZIP in the specific log folder
    );
    Map<String, dynamic> ftpDataResult = {'success': true, 'statuses': []}; // Default success if no file
    if (dataZip != null) {
      ftpDataResult = await _submissionFtpService.submit(
        moduleName: moduleName, // 'river_investigative'
        fileToUpload: dataZip,
        remotePath: '/${p.basename(dataZip.path)}' // Standard remote path
      );
    }
    // Create and upload image ZIP (if images exist)
    Map<String, dynamic> ftpImageResult = {'success': true, 'statuses': []}; // Default success if no images
    if (imageFiles.isNotEmpty) {
      final imageZip = await _zippingService.createImageZip(
        imageFiles: imageFiles.values.toList(),
        baseFileName: baseFileName,
        destinationDir: localSubmissionDir, // Save ZIP in the specific log folder
      );
      if (imageZip != null) {
        ftpImageResult = await _submissionFtpService.submit(
          moduleName: moduleName, // 'river_investigative'
          fileToUpload: imageZip,
          remotePath: '/${p.basename(imageZip.path)}' // Standard remote path
        );
      }
    }
    // Combine statuses from both uploads
    return {
      'statuses': <Map<String, dynamic>>[
        ...(ftpDataResult['statuses'] as List? ?? []), // Use null-aware spread
        ...(ftpImageResult['statuses'] as List? ?? []), // Use null-aware spread
      ],
    };
  }
  /// Saves or updates the local log file and saves a record to the central DB log (Investigative).
  ///
  /// Mutates [data]'s submissionStatus/submissionMessage, writes the
  /// per-submission data.json (create or update depending on whether
  /// [logDirectory] is provided), and inserts a row into the central
  /// submission_log table. DB failures are logged, not rethrown.
  Future<void> _logAndSave({
    required RiverInvesManualSamplingData data, // Updated model type
    required String status,
    required String message,
    required List<Map<String, dynamic>> apiResults,
    required List<Map<String, dynamic>> ftpStatuses,
    required String serverName,
    String? logDirectory, // Can be null initially, gets populated on first save
  }) async {
    data.submissionStatus = status;
    data.submissionMessage = message;
    final baseFileName = _generateBaseFileName(data); // Use helper for consistent naming
    // Prepare log data map using toMap()
    final Map<String, dynamic> logMapData = data.toMap();
    // Add submission metadata that might not be in toMap() or needs overriding
    logMapData['submissionStatus'] = status;
    logMapData['submissionMessage'] = message;
    logMapData['reportId'] = data.reportId;
    logMapData['serverConfigName'] = serverName;
    // Store API/FTP results as JSON strings
    logMapData['api_status'] = jsonEncode(apiResults); // Ensure apiResults is a list
    logMapData['ftp_status'] = jsonEncode(ftpStatuses); // Ensure ftpStatuses is a list
    String? savedLogPath = logDirectory;
    // Save or Update local log file (data.json)
    if (savedLogPath != null && savedLogPath.isNotEmpty) {
      // Update existing log
      logMapData['logDirectory'] = savedLogPath; // Ensure logDirectory path is in the map for update method
      // *** MODIFIED: Use correct update method ***
      await _localStorageService.updateRiverInvestigativeLog(logMapData); // NEW UPDATE METHOD
    } else {
      // Save new log and get the path
      // *** MODIFIED: Use correct save method ***
      savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data, serverName: serverName); // NEW SAVE METHOD
      if (savedLogPath != null) {
        logMapData['logDirectory'] = savedLogPath; // Add the new path for central log
      } else {
        debugPrint("Failed to save River Investigative log locally, central DB log might be incomplete.");
        // Handle case where local save failed? Maybe skip central log or log with error?
      }
    }
    // Save record to central DB log (submission_log table)
    final imagePaths = data.toApiImageFiles().values.whereType<File>().map((f) => f.path).toList();
    final centralLogData = {
      'submission_id': data.reportId ?? baseFileName, // Use report ID or generated name as unique ID
      // *** MODIFIED: Module and Type ***
      'module': 'river', // Keep main module as 'river'
      'type': 'Investigative', // Specific type
      'status': status,
      'message': message,
      'report_id': data.reportId,
      'created_at': DateTime.now().toIso8601String(),
      'form_data': jsonEncode(logMapData), // Log the comprehensive map including paths and status
      'image_data': jsonEncode(imagePaths), // Log original image paths used for submission attempt
      'server_name': serverName,
      'api_status': jsonEncode(apiResults), // Log API results
      'ftp_status': jsonEncode(ftpStatuses), // Log FTP results
    };
    try {
      await _dbHelper.saveSubmissionLog(centralLogData);
    } catch (e) {
      debugPrint("Error saving River Investigative submission log to DB: $e"); // Log context update
    }
  }
/// Handles sending or queuing the Telegram alert for River Investigative submissions.
Future<void> _handleSuccessAlert(RiverInvesManualSamplingData data, List<Map<String, dynamic>>? appSettings, {required bool isDataOnly, bool isSessionExpired = false}) async { // Updated model type
try {
final message = await _generateInvestigativeAlertMessage(data, isDataOnly: isDataOnly); // Call specific helper
// *** MODIFIED: Telegram key ***
final alertKey = 'river_investigative'; // Specific key for this module
if (isSessionExpired) {
debugPrint("Session is expired; queuing River Investigative Telegram alert directly for $alertKey."); // Log context update
await _telegramService.queueMessage(alertKey, message, appSettings);
} else {
final bool wasSent = await _telegramService.sendAlertImmediately(alertKey, message, appSettings);
if (!wasSent) {
// Fallback to queueing if immediate send fails
await _telegramService.queueMessage(alertKey, message, appSettings);
}
}
} catch (e) {
debugPrint("Failed to handle River Investigative Telegram alert: $e"); // Log context update
}
}
  /// Generates the specific Telegram alert message content for River Investigative.
  ///
  /// Builds a Markdown-formatted message: title, station details (with a
  /// dedicated section for 'New Location' stations), submitter metadata, an
  /// optional distance alert (>50 m from the station, or when remarks were
  /// entered), and an optional parameter-limit section.
  Future<String> _generateInvestigativeAlertMessage(RiverInvesManualSamplingData data, {required bool isDataOnly}) async { // Updated model type
    final submissionType = isDataOnly ? "(Data Only)" : "(Data & Images)";
    // Use helpers to get determined names/codes
    final stationName = data.getDeterminedRiverName() ?? data.getDeterminedStationName() ?? 'N/A'; // Combine river/station name
    final stationCode = data.getDeterminedStationCode() ?? 'N/A';
    final submissionDate = data.samplingDate ?? DateFormat('yyyy-MM-dd').format(DateTime.now());
    final submitter = data.firstSamplerName ?? 'N/A';
    final sondeID = data.sondeId ?? 'N/A';
    final distanceKm = data.distanceDifferenceInKm ?? 0;
    final distanceMeters = (distanceKm * 1000).toStringAsFixed(0);
    final distanceRemarks = data.distanceDifferenceRemarks ?? ''; // Default to empty string
    final buffer = StringBuffer()
      ..writeln('✅ *River Investigative Sample ${submissionType} Submitted:*') // Updated title
      ..writeln();
    // Adapt station info based on type
    buffer.writeln('*Station Type:* ${data.stationTypeSelection ?? 'N/A'}');
    if (data.stationTypeSelection == 'New Location') {
      buffer.writeln('*New Location Name:* ${data.newStationName ?? 'N/A'}');
      buffer.writeln('*New Location Code:* ${data.newStationCode ?? 'N/A'}');
      buffer.writeln('*New Location State:* ${data.newStateName ?? 'N/A'}');
      buffer.writeln('*New Location Basin:* ${data.newBasinName ?? 'N/A'}');
      buffer.writeln('*New Location River:* ${data.newRiverName ?? 'N/A'}');
      buffer.writeln('*Coordinates:* ${data.stationLatitude ?? 'N/A'}, ${data.stationLongitude ?? 'N/A'}');
    } else {
      buffer.writeln('*Station Name & Code:* $stationName ($stationCode)');
    }
    buffer
      ..writeln('*Date of Submitted:* $submissionDate')
      ..writeln('*Submitted by User:* $submitter')
      ..writeln('*Sonde ID:* $sondeID')
      ..writeln('*Status of Submission:* Successful');
    // Include distance warning only if NOT a new location and distance > 50m
    // (or when distance remarks were entered, even with distance <= 50m).
    if (data.stationTypeSelection != 'New Location' && (distanceKm * 1000 > 50 || distanceRemarks.isNotEmpty)) {
      buffer
        ..writeln()
        ..writeln('🔔 *Distance Alert:*')
        ..writeln('*Distance from station:* $distanceMeters meters');
      if (distanceRemarks.isNotEmpty) {
        buffer.writeln('*Remarks for distance:* $distanceRemarks');
      }
    }
    // Add parameter limit check section (uses the same river limits)
    final outOfBoundsAlert = await _getOutOfBoundsAlertSection(data); // Call helper
    if (outOfBoundsAlert.isNotEmpty) {
      buffer.write(outOfBoundsAlert);
    }
    return buffer.toString();
  }
/// Helper to generate the parameter limit alert section for Telegram (River Investigative).
Future<String> _getOutOfBoundsAlertSection(RiverInvesManualSamplingData data) async { // Updated model type
// Define mapping from data model keys to parameter names used in limits table
// This mapping should be consistent with River In-Situ
const Map<String, String> _parameterKeyToLimitName = {
'oxygenConcentration': 'Oxygen Conc', 'oxygenSaturation': 'Oxygen Sat', 'ph': 'pH',
'salinity': 'Salinity', 'electricalConductivity': 'Conductivity', 'temperature': 'Temperature',
'tds': 'TDS', 'turbidity': 'Turbidity', 'ammonia': 'Ammonia', 'batteryVoltage': 'Battery',
};
// Load the same river parameter limits as In-Situ
final allLimits = await _dbHelper.loadRiverParameterLimits() ?? [];
if (allLimits.isEmpty) return ""; // No limits defined
// Get current readings from the investigative data model
final readings = {
'oxygenConcentration': data.oxygenConcentration, 'oxygenSaturation': data.oxygenSaturation,
'ph': data.ph, 'salinity': data.salinity, 'electricalConductivity': data.electricalConductivity,
'temperature': data.temperature, 'tds': data.tds, 'turbidity': data.turbidity,
'ammonia': data.ammonia, 'batteryVoltage': data.batteryVoltage,
};
final List<String> outOfBoundsMessages = [];
// Helper to parse limit values (copied from In-Situ)
double? parseLimitValue(dynamic value) {
if (value == null) return null;
if (value is num) return value.toDouble();
if (value is String) return double.tryParse(value);
return null;
}
// Iterate through readings and check against limits
readings.forEach((key, value) {
if (value == null || value == -999.0) return; // Skip missing/default values
final limitName = _parameterKeyToLimitName[key];
if (limitName == null) return; // Skip if parameter not in mapping
// Find the limit data for this parameter
final limitData = allLimits.firstWhere(
(l) => l['param_parameter_list'] == limitName,
orElse: () => <String, dynamic>{}, // Return empty map if not found
);
if (limitData.isNotEmpty) {
final lowerLimit = parseLimitValue(limitData['param_lower_limit']);
final upperLimit = parseLimitValue(limitData['param_upper_limit']);
bool isOutOfBounds = false;
// Check bounds
if (lowerLimit != null && value < lowerLimit) isOutOfBounds = true;
if (upperLimit != null && value > upperLimit) isOutOfBounds = true;
if (isOutOfBounds) {
// Format message for Telegram
final valueStr = value.toStringAsFixed(5);
final lowerStr = lowerLimit?.toStringAsFixed(5) ?? 'N/A';
final upperStr = upperLimit?.toStringAsFixed(5) ?? 'N/A';
outOfBoundsMessages.add('- *$limitName*: `$valueStr` (Limit: `$lowerStr` - `$upperStr`)');
}
}
});
// If no parameters were out of bounds, return empty string
if (outOfBoundsMessages.isEmpty) {
return "";
}
// Construct the alert section header and messages
final buffer = StringBuffer()
..writeln() // Add spacing
..writeln('⚠️ *Parameter Limit Alert:*')
..writeln('The following parameters were outside their defined limits:');
buffer.writeAll(outOfBoundsMessages, '\n'); // Add each message on a new line
return buffer.toString();
}
} // End of RiverInvestigativeSamplingService class