// lib/services/marine_investigative_sampling_service.dart import 'dart:async'; import 'dart:io'; import 'dart:convert'; import 'package:flutter/material.dart'; // Needed for debugPrint and BuildContext import 'package:flutter/services.dart'; import 'package:image_picker/image_picker.dart'; import 'package:path_provider/path_provider.dart'; import 'package:path/path.dart' as p; import 'package:image/image.dart' as img; import 'package:geolocator/geolocator.dart'; import 'package:permission_handler/permission_handler.dart'; import 'package:flutter_bluetooth_serial/flutter_bluetooth_serial.dart'; import 'package:usb_serial/usb_serial.dart'; import 'package:connectivity_plus/connectivity_plus.dart'; import 'package:intl/intl.dart'; // Import intl import '../auth_provider.dart'; import 'location_service.dart'; import '../models/marine_inves_manual_sampling_data.dart'; import '../bluetooth/bluetooth_manager.dart'; import '../serial/serial_manager.dart'; import 'local_storage_service.dart'; import 'server_config_service.dart'; import 'zipping_service.dart'; import 'submission_api_service.dart'; import 'submission_ftp_service.dart'; import 'telegram_service.dart'; import 'retry_service.dart'; import 'base_api_service.dart'; // Import for SessionExpiredException import 'api_service.dart'; // Import for DatabaseHelper import 'package:environment_monitoring_app/services/database_helper.dart'; import 'user_preferences_service.dart'; // ADDED /// A dedicated service for the Marine Investigative Sampling feature. 
class MarineInvestigativeSamplingService {
  // --- Business-logic services ---
  final LocationService _locationService = LocationService();
  final BluetoothManager _bluetoothManager = BluetoothManager();
  final SerialManager _serialManager = SerialManager();

  // --- Submission & utility services ---
  final SubmissionApiService _submissionApiService = SubmissionApiService();
  final SubmissionFtpService _submissionFtpService = SubmissionFtpService();
  final ZippingService _zippingService = ZippingService();
  final LocalStorageService _localStorageService = LocalStorageService();
  final ServerConfigService _serverConfigService = ServerConfigService();
  final DatabaseHelper _dbHelper = DatabaseHelper();
  final RetryService _retryService = RetryService();
  final TelegramService _telegramService;
  final UserPreferencesService _userPreferencesService =
      UserPreferencesService();

  /// Set by [dispose]; long-running async callbacks check it and bail out
  /// instead of touching torn-down managers.
  bool _isDisposed = false;

  MarineInvestigativeSamplingService(this._telegramService);

  /// Native channel used to request USB permission from the Android side.
  static const platform =
      MethodChannel('com.example.environment_monitoring_app/usb');

  // --- Location services (delegated) ---

  /// Returns the device's current position, or null if unavailable.
  // NOTE(review): return type reconstructed as Position? (geolocator) —
  // confirm against LocationService.getCurrentLocation().
  Future<Position?> getCurrentLocation() =>
      _locationService.getCurrentLocation();

  /// Returns the distance between two lat/lon pairs (units per
  /// LocationService — presumably kilometres; see distanceDifferenceInKm).
  double calculateDistance(double lat1, double lon1, double lat2, double lon2) =>
      _locationService.calculateDistance(lat1, lon1, lat2, lon2);

  // --- Image processing ---

  /// Picks an image from [source], watermarks it with the sampling
  /// date/time, and saves it to a temp file named
  /// `<stationCode>_<date>-<time>_<imageInfo>.jpg`.
  ///
  /// Returns null if the user cancels, the service was disposed while
  /// picking, decoding fails, or ([isRequired] only) the image is portrait.
  Future<File?> pickAndProcessImage(
    ImageSource source, {
    required MarineInvesManualSamplingData data,
    required String imageInfo,
    bool isRequired = false,
  }) async {
    final picker = ImagePicker();
    final XFile? photo = await picker.pickImage(
        source: source, imageQuality: 85, maxWidth: 1024);
    // User cancelled, or service torn down while the picker was open.
    if (photo == null || _isDisposed) return null;

    final bytes = await photo.readAsBytes();
    img.Image? originalImage = img.decodeImage(bytes);
    if (originalImage == null) return null;

    // Required photos must be landscape (mirrors the in-situ module).
    if (isRequired && originalImage.height > originalImage.width) {
      debugPrint("Image rejected: Must be in landscape orientation.");
      return null;
    }

    // Draw a white box with the sampling timestamp in the top-left corner.
    final String watermarkTimestamp =
        "${data.samplingDate} ${data.samplingTime}";
    final font = img.arial24;
    // Rough width estimate (~12px per glyph) so the box covers the text.
    final textWidth = watermarkTimestamp.length * 12;
    img.fillRect(originalImage,
        x1: 5,
        y1: 5,
        x2: textWidth + 15,
        y2: 35,
        color: img.ColorRgb8(255, 255, 255));
    img.drawString(originalImage, watermarkTimestamp,
        font: font, x: 10, y: 10, color: img.ColorRgb8(0, 0, 0));

    final tempDir = await getTemporaryDirectory();
    String stationCode = 'NA';
    if (data.stationTypeSelection == 'Existing Manual Station') {
      stationCode = data.selectedStation?['man_station_code'] ?? 'MANUAL_NA';
    } else if (data.stationTypeSelection == 'Existing Tarball Station') {
      stationCode =
          data.selectedTarballStation?['tbl_station_code'] ?? 'TARBALL_NA';
    } else if (data.stationTypeSelection == 'New Location') {
      stationCode = data.newStationCode ?? 'NEW_NA';
    }
    // ':' is illegal in some filesystems, so the time is dash-separated.
    final fileTimestamp =
        "${data.samplingDate}-${data.samplingTime}".replaceAll(':', '-');
    final newFileName =
        "${stationCode}_${fileTimestamp}_${imageInfo.replaceAll(' ', '')}.jpg";
    final filePath = p.join(tempDir.path, newFileName);
    File processedFile = File(filePath);
    await processedFile.writeAsBytes(img.encodeJpg(originalImage));
    return processedFile;
  }

  // --- Device connection (delegated to managers) ---

  ValueNotifier<BluetoothConnectionState> get bluetoothConnectionState =>
      _bluetoothManager.connectionState;

  // NOTE(review): payload type of the serial connection notifier is declared
  // in SerialManager and not visible here — left untyped on purpose.
  ValueNotifier get serialConnectionState => _serialManager.connectionState;

  /// The sonde ID from whichever transport is active: Bluetooth when
  /// connected, otherwise serial.
  // NOTE(review): element type lives in the managers — confirm (String?).
  ValueNotifier get sondeId =>
      _bluetoothManager.connectionState.value !=
              BluetoothConnectionState.disconnected
          ? _bluetoothManager.sondeId
          : _serialManager.sondeId;

  // NOTE(review): stream payload reconstructed as Map<String, dynamic>
  // (stripped generics in the original text) — confirm against the managers.
  Stream<Map<String, dynamic>> get bluetoothDataStream =>
      _bluetoothManager.dataStream;
  Stream<Map<String, dynamic>> get serialDataStream =>
      _serialManager.dataStream;

  String? get connectedBluetoothDeviceName =>
      _bluetoothManager.connectedDeviceName.value;
  String? get connectedSerialDeviceName =>
      _serialManager.connectedDeviceName.value;

  // --- Permissions ---

  /// Requests Bluetooth scan/connect and foreground-location permissions.
  /// Returns true only if all three are granted.
  Future<bool> requestDevicePermissions() async {
    Map<Permission, PermissionStatus> statuses = await [
      Permission.bluetoothScan,
      Permission.bluetoothConnect,
      Permission.locationWhenInUse,
    ].request();
    if (statuses[Permission.bluetoothScan] == PermissionStatus.granted &&
        statuses[Permission.bluetoothConnect] == PermissionStatus.granted &&
        statuses[Permission.locationWhenInUse] == PermissionStatus.granted) {
      return true;
    } else {
      debugPrint("Bluetooth or Location permissions denied.");
      return false;
    }
  }

  // --- Bluetooth methods ---

  Future<List<BluetoothDevice>> getPairedBluetoothDevices() =>
      _bluetoothManager.getPairedDevices();
  Future<void> connectToBluetoothDevice(BluetoothDevice device) =>
      _bluetoothManager.connect(device);
  void disconnectFromBluetooth() => _bluetoothManager.disconnect();
  void startBluetoothAutoReading({Duration? interval}) =>
      _bluetoothManager.startAutoReading(
          interval: interval ?? const Duration(seconds: 2));
  void stopBluetoothAutoReading() => _bluetoothManager.stopAutoReading();

  // --- USB serial methods ---

  Future<List<UsbDevice>> getAvailableSerialDevices() =>
      _serialManager.getAvailableDevices();

  /// Asks the platform side for USB permission for [device].
  /// Returns false on denial or platform error.
  Future<bool> requestUsbPermission(UsbDevice device) async {
    try {
      final bool? granted = await platform.invokeMethod(
          'requestUsbPermission', {'vid': device.vid, 'pid': device.pid});
      return granted ?? false;
    } on PlatformException catch (e) {
      debugPrint("Failed to request USB permission: '${e.message}'.");
      return false;
    }
  }

  /// Connects to [device] after obtaining USB permission.
  /// Throws if permission is not granted.
  Future<void> connectToSerialDevice(UsbDevice device) async {
    final bool permissionGranted = await requestUsbPermission(device);
    if (permissionGranted) {
      await _serialManager.connect(device);
    } else {
      throw Exception("USB permission was not granted.");
    }
  }

  /// Stops auto-reading before disconnecting so the read timer never fires
  /// against a closed port (thread-interruption fix).
  void disconnectFromSerial() {
    stopSerialAutoReading();
    _serialManager.disconnect();
  }

  void startSerialAutoReading({Duration? interval}) =>
      _serialManager.startAutoReading(
          interval: interval ?? const Duration(seconds: 2));
  void stopSerialAutoReading() => _serialManager.stopAutoReading();

  /// Marks the service disposed and tears down both device managers.
  void dispose() {
    _isDisposed = true;
    _bluetoothManager.dispose();
    _serialManager.dispose();
  }

  /// Generates a unique timestamp ID (epoch milliseconds) from the sampling
  /// date ('yyyy-MM-dd') and time ('HH:mm'). Falls back to "now" when the
  /// pair cannot be parsed.
  String _generateTimestampId(String? date, String? time) {
    final String dateTimeString = "${date ?? ''} ${time ?? ''}";
    try {
      final DateTime samplingDateTime =
          DateFormat('yyyy-MM-dd HH:mm').parse(dateTimeString);
      return samplingDateTime.millisecondsSinceEpoch.toString();
    } catch (e) {
      debugPrint(
          "Could not parse '$dateTimeString' for timestamp ID, using current time. Error: $e");
      return DateTime.now().millisecondsSinceEpoch.toString();
    }
  }

  /// Entry point for submitting an investigative sample.
  ///
  /// Assigns a timestamp-based [data.reportId], then routes to a direct
  /// online submission or to offline queuing depending on connectivity and
  /// whether the current auth session is an offline one.
  Future<Map<String, dynamic>> submitInvestigativeSample({
    required MarineInvesManualSamplingData data,
    required List<Map<String, dynamic>>? appSettings,
    required AuthProvider authProvider,
    BuildContext? context,
    String? logDirectory,
  }) async {
    const String moduleName = 'marine_investigative';

    // The timestamp ID is the primary submission identifier from here on.
    final String timestampId =
        _generateTimestampId(data.samplingDate, data.samplingTime);
    data.reportId = timestampId;

    final connectivityResult = await Connectivity().checkConnectivity();
    bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
    bool isOfflineSession = authProvider.isLoggedIn &&
        (authProvider.profileData?['token']?.startsWith("offline-session-") ??
            false);

    // Online but holding an offline token: try to upgrade the session first.
    if (isOnline && isOfflineSession) {
      debugPrint(
          "Investigative submission online during offline session. Attempting auto-relogin...");
      try {
        final bool transitionSuccess =
            await authProvider.checkAndTransitionToOnlineSession();
        if (transitionSuccess) {
          isOfflineSession = false;
        } else {
          isOnline = false; // Auto-relogin failed; treat as offline.
        }
      } on SessionExpiredException catch (_) {
        debugPrint(
            "Session expired during auto-relogin check. Treating as offline.");
        isOnline = false;
      }
    }

    if (isOnline && !isOfflineSession) {
      debugPrint("Proceeding with direct ONLINE Investigative submission...");
      return await _performOnlineSubmission(
        data: data,
        appSettings: appSettings,
        moduleName: moduleName,
        authProvider: authProvider,
        logDirectory: logDirectory,
      );
    } else {
      debugPrint("Proceeding with OFFLINE Investigative queuing mechanism...");
      return await _performOfflineQueuing(
        data: data,
        moduleName: moduleName,
        logDirectory: logDirectory,
      );
    }
  }

  /// Performs the online submission: API form data, API images, FTP zips,
  /// local logging, and (on first success) a Telegram alert.
  Future<Map<String, dynamic>> _performOnlineSubmission({
    required MarineInvesManualSamplingData data,
    required List<Map<String, dynamic>>? appSettings,
    required String moduleName,
    required AuthProvider authProvider,
    String? logDirectory,
  }) async {
    // Capture the pre-attempt status so we can suppress duplicate alerts.
    final String? previousStatus = data.submissionStatus;

    final serverName =
        (await _serverConfigService.getActiveApiConfig())?['config_name']
                as String? ??
            'Default';
    final imageFilesWithNulls = data.toApiImageFiles();
    imageFilesWithNulls.removeWhere((key, value) => value == null);
    final Map<String, File> finalImageFiles =
        imageFilesWithNulls.cast<String, File>();

    bool anyApiSuccess = false;
    Map<String, dynamic> apiDataResult = {};
    Map<String, dynamic> apiImageResult = {};
    String finalMessage = '';
    String finalStatus = '';
    bool isSessionKnownToBeExpired = false;
    // Server-side database ID (e.g. 102); data.reportId holds the
    // timestamp ID and is never overwritten by it.
    String? apiRecordId;

    // 1. Check module preferences (API/FTP may be disabled per user).
    final pref = await _userPreferencesService.getModulePreference(moduleName);
    bool isApiEnabled = pref?['is_api_enabled'] ?? true;
    bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true;

    if (isApiEnabled) {
      try {
        // 2a. Submit form data.
        apiDataResult = await _submissionApiService.submitPost(
          moduleName: moduleName,
          endpoint: 'marine-investigative/sample',
          body: data.toApiFormData(),
        );
        if (apiDataResult['success'] == true) {
          anyApiSuccess = true;
          apiRecordId = apiDataResult['data']?['man_inves_id']?.toString();
          if (apiRecordId != null) {
            if (finalImageFiles.isNotEmpty) {
              // 2b. Submit images, keyed by the server's record ID.
              apiImageResult = await _submissionApiService.submitMultipart(
                moduleName: moduleName,
                endpoint: 'marine-investigative/images',
                fields: {'man_inves_id': apiRecordId},
                files: finalImageFiles,
              );
              if (apiImageResult['success'] != true) {
                anyApiSuccess = false; // Image failure fails the API leg.
              }
            }
          } else {
            anyApiSuccess = false;
            apiDataResult['message'] =
                'API Error: Submission succeeded but did not return a server record ID.';
          }
        }
      } on SessionExpiredException catch (_) {
        debugPrint(
            "Online submission failed due to session expiry that could not be refreshed.");
        isSessionKnownToBeExpired = true;
        anyApiSuccess = false;
        apiDataResult = {
          'success': false,
          'message':
              'Session expired and re-login failed. API submission queued.'
        };
        // Queue the data call for retry; the service call never completed.
        await _retryService.addApiToQueue(
            endpoint: 'marine-investigative/sample',
            method: 'POST',
            body: data.toApiFormData());
        // Only queue images when the data call yielded a record ID before
        // expiry (otherwise there is no server ID to attach them to).
        if (finalImageFiles.isNotEmpty && apiRecordId != null) {
          await _retryService.addApiToQueue(
              endpoint: 'marine-investigative/images',
              method: 'POST_MULTIPART',
              fields: {'man_inves_id': apiRecordId},
              files: finalImageFiles);
        }
      }
    } else {
      debugPrint("API submission disabled for $moduleName by user preference.");
      apiDataResult = {
        'success': true,
        'message': 'API submission disabled by user.'
      };
      anyApiSuccess = true; // Intentional skip counts as success.
    }

    // 3. Submit FTP files.
    Map<String, dynamic> ftpResults = {'statuses': []};
    bool anyFtpSuccess = false;
    // Skip FTP when it already succeeded in a previous attempt.
    bool previousFtpSuccess =
        data.submissionStatus == 'L4' || data.submissionStatus == 'S4';
    if (!isFtpEnabled) {
      debugPrint(
          "FTP submission disabled for $moduleName by user preference. Skipping FTP.");
      ftpResults = {
        'statuses': [
          {
            'status': 'Skipped',
            'message': 'FTP disabled by user preference.',
            'success': true
          }
        ]
      };
      anyFtpSuccess = true;
    } else if (previousFtpSuccess) {
      debugPrint(
          "FTP submission skipped because it was already successful (Status: ${data.submissionStatus}).");
      ftpResults = {
        'statuses': [
          {
            'status': 'Skipped',
            'message': 'Already successful in previous attempt.',
            'success': true
          }
        ]
      };
      anyFtpSuccess = true;
    } else {
      if (isSessionKnownToBeExpired) {
        // API session is dead: skip the live FTP attempt and queue the zips
        // for every configured FTP target instead.
        debugPrint(
            "Skipping FTP attempt due to known expired session. Manually queuing FTP tasks.");
        final baseFileNameForQueue = _generateBaseFileName(data);
        final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
        final dataZip = await _zippingService.createDataZip(
          jsonDataMap: {'db.json': jsonEncode(data.toDbJson())},
          baseFileName: baseFileNameForQueue,
          destinationDir: null, // Use temp dir.
        );
        if (dataZip != null) {
          for (final config in ftpConfigs) {
            final configId = config['ftp_config_id'];
            if (configId != null) {
              await _retryService.addFtpToQueue(
                  localFilePath: dataZip.path,
                  remotePath: '/${p.basename(dataZip.path)}',
                  ftpConfigId: configId);
            }
          }
        }
        if (finalImageFiles.isNotEmpty) {
          final imageZip = await _zippingService.createImageZip(
            imageFiles: finalImageFiles.values.toList(),
            baseFileName: baseFileNameForQueue,
            destinationDir: null, // Use temp dir.
          );
          if (imageZip != null) {
            for (final config in ftpConfigs) {
              final configId = config['ftp_config_id'];
              if (configId != null) {
                await _retryService.addFtpToQueue(
                    localFilePath: imageZip.path,
                    remotePath: '/${p.basename(imageZip.path)}',
                    ftpConfigId: configId);
              }
            }
          }
        }
        ftpResults = {
          'statuses': [
            {
              'status': 'Queued',
              'message': 'FTP upload queued due to API session issue.',
              'success': false
            }
          ]
        };
        anyFtpSuccess = false;
      } else {
        // Session OK: perform the normal FTP upload.
        try {
          ftpResults = await _generateAndUploadFtpFiles(
              data, finalImageFiles, serverName, moduleName);
          // 'Not Configured' targets do not count as failures.
          anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) =>
              status['success'] == false && status['status'] != 'Not Configured');
        } catch (e) {
          debugPrint("Unexpected FTP submission error: $e");
          anyFtpSuccess = false;
          ftpResults = {
            'statuses': [
              {
                'status': 'Error',
                'message': 'FTP process failed: $e.',
                'success': false
              }
            ]
          };
        }
      }
    }

    // 4. Determine the final status code.
    //    S4 = API + FTP ok, S3 = API only, L4 = FTP only, L1 = neither.
    final bool overallSuccess = anyApiSuccess || anyFtpSuccess;
    if (anyApiSuccess && anyFtpSuccess) {
      finalMessage = 'Data submitted successfully to all destinations.';
      finalStatus = 'S4';
    } else if (anyApiSuccess && !anyFtpSuccess) {
      finalMessage =
          'Data sent to API, but some FTP uploads failed or were queued.';
      finalStatus = 'S3';
    } else if (!anyApiSuccess && anyFtpSuccess) {
      finalMessage =
          'API submission failed and was queued, but files were sent to FTP successfully.';
      finalStatus = 'L4';
    } else {
      finalMessage = apiDataResult['message'] ??
          'All submission attempts failed and have been queued for retry.';
      finalStatus = 'L1';
    }

    // 5. Log locally.
    await _logAndSave(
      data: data,
      status: finalStatus,
      message: finalMessage,
      apiResults: [apiDataResult, apiImageResult],
      ftpStatuses: ftpResults['statuses'],
      serverName: serverName,
      finalImageFiles: finalImageFiles,
      apiRecordId: apiRecordId, // The server DB ID, not the timestamp.
      logDirectory: logDirectory,
    );

    // 6. Send alert — but only once per submission: skip if a previous
    //    attempt already reached a success status.
    final bool wasAlreadySuccessful = previousStatus == 'S4' ||
        previousStatus == 'S3' ||
        previousStatus == 'L4';
    if (overallSuccess && !wasAlreadySuccessful) {
      _handleInvestigativeSuccessAlert(data, appSettings,
          isDataOnly: finalImageFiles.isEmpty,
          isSessionExpired: isSessionKnownToBeExpired);
    }
    // reportId is the timestamp ID assigned in submitInvestigativeSample.
    return {
      'success': overallSuccess,
      'message': finalMessage,
      'reportId': data.reportId
    };
  }

  /// Saves (or updates) the local log and queues the submission for retry
  /// when the device is offline or holds an offline session.
  Future<Map<String, dynamic>> _performOfflineQueuing({
    required MarineInvesManualSamplingData data,
    required String moduleName,
    String? logDirectory,
  }) async {
    final serverConfig = await _serverConfigService.getActiveApiConfig();
    final serverName = serverConfig?['config_name'] as String? ?? 'Default';
    data.submissionStatus = 'L1';
    data.submissionMessage = 'Submission queued for later retry.';

    String? savedLogPath = logDirectory; // Reuse an existing log if given.
    if (savedLogPath != null && savedLogPath.isNotEmpty) {
      Map<String, dynamic> logUpdateData = data.toDbJson();
      final imageFiles = data.toApiImageFiles();
      imageFiles.forEach((key, file) {
        logUpdateData[key] = file?.path; // Persist image paths.
      });
      logUpdateData['logDirectory'] = savedLogPath;
      await _localStorageService.updateInvestigativeLog(logUpdateData);
      debugPrint("Updated existing Investigative log for queuing: $savedLogPath");
    } else {
      savedLogPath = await _localStorageService
          .saveInvestigativeSamplingData(data, serverName: serverName);
      debugPrint("Saved new Investigative log for queuing: $savedLogPath");
    }

    if (savedLogPath == null) {
      const message = "Failed to save submission to local device storage.";
      await _logAndSave(
          data: data,
          status: 'Error',
          message: message,
          apiResults: [],
          ftpStatuses: [],
          serverName: serverName,
          finalImageFiles: {},
          apiRecordId: null,
          logDirectory: logDirectory);
      return {'success': false, 'message': message};
    }

    await _retryService.queueTask(
      type: 'investigative_submission',
      payload: {
        'module': moduleName,
        'localLogPath': savedLogPath, // Directory path of the saved log.
        'serverConfig': serverConfig,
      },
    );
    const successMessage =
        "Device offline. Submission has been saved locally and queued for automatic retry when connection is restored.";
    return {
      'success': true,
      'message': successMessage,
      'reportId': data.reportId
    };
  }

  /// Builds the `<stationCode>_<reportId>` base name used for zip files and
  /// the local submission folder. Falls back to "now" if reportId is unset.
  String _generateBaseFileName(MarineInvesManualSamplingData data) {
    String stationCode = 'NA';
    if (data.stationTypeSelection == 'Existing Manual Station') {
      stationCode = data.selectedStation?['man_station_code'] ?? 'MANUAL_NA';
    } else if (data.stationTypeSelection == 'Existing Tarball Station') {
      stationCode =
          data.selectedTarballStation?['tbl_station_code'] ?? 'TARBALL_NA';
    } else if (data.stationTypeSelection == 'New Location') {
      stationCode = data.newStationCode ?? 'NEW_NA';
    }
    // reportId is the timestamp ID set in submitInvestigativeSample.
    if (data.reportId == null || data.reportId!.isEmpty) {
      // Safety fallback; should not happen in the normal flow.
      debugPrint(
          "Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
      return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
    }
    return '${stationCode}_${data.reportId}';
  }

  /// Creates the data and image zips under the local submission folder and
  /// uploads both over FTP. Returns a map with a combined 'statuses' list.
  Future<Map<String, dynamic>> _generateAndUploadFtpFiles(
      MarineInvesManualSamplingData data,
      Map<String, File> imageFiles,
      String serverName,
      String moduleName) async {
    final baseFileName = _generateBaseFileName(data);
    final Directory? logDirectory = await _localStorageService.getLogDirectory(
      serverName: serverName,
      module: 'marine',
      subModule: 'marine_investigative_sampling',
    );
    // Folder name matches the zip base name: [stationCode]_[reportId].
    final folderName = baseFileName;
    final Directory? localSubmissionDir = logDirectory != null
        ? Directory(p.join(logDirectory.path, folderName))
        : null;
    if (localSubmissionDir != null && !await localSubmissionDir.exists()) {
      try {
        await localSubmissionDir.create(recursive: true);
      } catch (e) {
        debugPrint(
            "Error creating local submission directory ${localSubmissionDir.path}: $e");
      }
    }

    // This module has no extra JSON files — a single db.json is zipped.
    final dataZip = await _zippingService.createDataZip(
      jsonDataMap: {'db.json': jsonEncode(data.toDbJson())},
      baseFileName: baseFileName,
      destinationDir: localSubmissionDir,
    );
    Map<String, dynamic> ftpDataResult = {'success': true, 'statuses': []};
    if (dataZip != null) {
      ftpDataResult = await _submissionFtpService.submit(
        moduleName: moduleName,
        fileToUpload: dataZip,
        remotePath: '/${p.basename(dataZip.path)}',
      );
    } else {
      debugPrint("Data ZIP file was null, skipping FTP upload for data.");
    }

    final imageZip = await _zippingService.createImageZip(
      imageFiles: imageFiles.values.toList(),
      baseFileName: baseFileName,
      destinationDir: localSubmissionDir,
    );
    Map<String, dynamic> ftpImageResult = {'success': true, 'statuses': []};
    if (imageZip != null) {
      ftpImageResult = await _submissionFtpService.submit(
        moduleName: moduleName,
        fileToUpload: imageZip,
        remotePath: '/${p.basename(imageZip.path)}',
      );
    } else {
      debugPrint("Image ZIP file was null, skipping FTP upload for images.");
    }

    return {
      'statuses': <Map<String, dynamic>>[
        ...(ftpDataResult['statuses'] as List? ?? []),
        ...(ftpImageResult['statuses'] as List? ?? []),
      ],
    };
  }

  /// Writes the submission outcome to the per-submission log file (create
  /// or update) and to the submissions table in the local database.
  Future<void> _logAndSave({
    required MarineInvesManualSamplingData data,
    required String status,
    required String message,
    required List<Map<String, dynamic>> apiResults,
    required List<dynamic> ftpStatuses,
    required String serverName,
    required Map<String, File> finalImageFiles,
    String? apiRecordId, // The server's DB ID (nullable).
    String? logDirectory, // Existing log directory path if updating.
  }) async {
    data.submissionStatus = status;
    data.submissionMessage = message;
    // Base name derives from the timestamp reportId.
    final baseFileName = _generateBaseFileName(data);

    Map<String, dynamic> logMapData = data.toDbJson();
    final imageFileMap = data.toApiImageFiles();
    imageFileMap.forEach((key, file) {
      logMapData[key] = file?.path; // Store path or null.
    });
    logMapData['submissionStatus'] = status;
    logMapData['submissionMessage'] = message;
    // data.reportId (the timestamp) is already in the map from toDbJson().
    logMapData['apiRecordId'] = apiRecordId; // Server DB ID alongside it.
    logMapData['serverConfigName'] = serverName;
    logMapData['api_status'] =
        jsonEncode(apiResults.where((r) => r.isNotEmpty).toList());
    logMapData['ftp_status'] = jsonEncode(ftpStatuses);

    if (logDirectory != null && logDirectory.isNotEmpty) {
      logMapData['logDirectory'] = logDirectory; // Keep path in the map.
      await _localStorageService.updateInvestigativeLog(logMapData);
    } else {
      await _localStorageService.saveInvestigativeSamplingData(data,
          serverName: serverName);
    }

    final logData = {
      'submission_id': data.reportId ?? baseFileName, // Timestamp ID.
      'module': 'marine', // Keep main module as 'marine'.
      'type': 'Investigative',
      'status': status,
      'message': message,
      'report_id': apiRecordId, // Server DB ID.
      'created_at': DateTime.now().toIso8601String(),
      'form_data': jsonEncode(logMapData), // Comprehensive map.
      'image_data':
          jsonEncode(finalImageFiles.values.map((f) => f.path).toList()),
      'server_name': serverName,
      'api_status': jsonEncode(apiResults),
      'ftp_status': jsonEncode(ftpStatuses),
    };
    try {
      await _dbHelper.saveSubmissionLog(logData);
    } catch (e) {
      debugPrint("Error saving Investigative submission log to DB: $e");
    }
  }

  /// Builds and dispatches (or queues) the Telegram success alert,
  /// including distance, station-limit, and NPE-limit sections.
  Future<void> _handleInvestigativeSuccessAlert(
      MarineInvesManualSamplingData data,
      List<Map<String, dynamic>>? appSettings,
      {required bool isDataOnly,
      bool isSessionExpired = false}) async {
    // Internal builder for the alert body.
    Future<String> generateInvestigativeTelegramAlertMessage(
        MarineInvesManualSamplingData data,
        {required bool isDataOnly}) async {
      final submissionType = isDataOnly ? "(Data Only)" : "(Data & Images)";
      String stationName = 'N/A';
      String stationCode = 'N/A';
      if (data.stationTypeSelection == 'Existing Manual Station') {
        stationName = data.selectedStation?['man_station_name'] ?? 'N/A';
        stationCode = data.selectedStation?['man_station_code'] ?? 'N/A';
      } else if (data.stationTypeSelection == 'Existing Tarball Station') {
        stationName =
            data.selectedTarballStation?['tbl_station_name'] ?? 'N/A';
        stationCode =
            data.selectedTarballStation?['tbl_station_code'] ?? 'N/A';
      } else if (data.stationTypeSelection == 'New Location') {
        stationName = data.newStationName ?? 'New Location';
        stationCode = data.newStationCode ?? 'NEW';
      }
      final submissionDate =
          data.samplingDate ?? DateFormat('yyyy-MM-dd').format(DateTime.now());
      final submissionTime =
          data.samplingTime ?? DateFormat('HH:mm:ss').format(DateTime.now());
      final submitter = data.firstSamplerName ?? 'N/A';
      final buffer = StringBuffer()
        ..writeln('🕵️ *Marine Investigative Sample $submissionType Submitted:*')
        ..writeln()
        ..writeln('*Station Name & Code:* $stationName ($stationCode)')
        ..writeln('*Date & Time of Submission:* $submissionDate $submissionTime')
        ..writeln('*Submitted by User:* $submitter')
        ..writeln('*Sonde ID:* ${data.sondeId ?? "N/A"}')
        ..writeln('*Status of Submission:* Successful');

      // Distance alert: over 50 m from the station, or remarks present.
      final distanceKm = data.distanceDifferenceInKm ?? 0;
      final distanceMeters = (distanceKm * 1000).toStringAsFixed(0);
      final distanceRemarks = data.distanceDifferenceRemarks ?? 'N/A';
      if (distanceKm * 1000 > 50 ||
          (distanceRemarks.isNotEmpty && distanceRemarks != 'N/A')) {
        buffer
          ..writeln()
          ..writeln('🔔 *Distance Alert:*')
          ..writeln(
              '*Distance from station:* $distanceMeters meters (${distanceKm.toStringAsFixed(3)} KM)');
        if (distanceRemarks.isNotEmpty && distanceRemarks != 'N/A') {
          buffer.writeln('*Remarks for distance:* $distanceRemarks');
        }
      }

      // Station-specific parameter limit section.
      final outOfBoundsAlert = await _getOutOfBoundsAlertSection(data);
      if (outOfBoundsAlert.isNotEmpty) {
        buffer.write(outOfBoundsAlert);
      }
      // NPE parameter limit section.
      final npeAlert = await _getNpeAlertSection(data);
      if (npeAlert.isNotEmpty) {
        buffer.write(npeAlert);
      }
      return buffer.toString();
    }

    try {
      final message = await generateInvestigativeTelegramAlertMessage(data,
          isDataOnly: isDataOnly);
      final alertKey = 'marine_investigative';
      if (isSessionExpired) {
        // No live session: go straight to the queue.
        debugPrint(
            "Session is expired; queuing Telegram alert directly for $alertKey.");
        await _telegramService.queueMessage(alertKey, message, appSettings);
      } else {
        final bool wasSent = await _telegramService.sendAlertImmediately(
            alertKey, message, appSettings);
        if (!wasSent) {
          await _telegramService.queueMessage(alertKey, message, appSettings);
        }
      }
    } catch (e) {
      debugPrint("Failed to handle Investigative Telegram alert: $e");
    }
  }

  /// Builds the station parameter-limit alert section, or "" when nothing
  /// is out of bounds. Only applies to existing manual stations.
  Future<String> _getOutOfBoundsAlertSection(
      MarineInvesManualSamplingData data) async {
    if (data.stationTypeSelection != 'Existing Manual Station') {
      return "";
    }
    // Maps reading keys to the parameter names stored in the limits table.
    const Map<String, String> parameterKeyToLimitName = {
      'oxygenConcentration': 'Oxygen Conc',
      'oxygenSaturation': 'Oxygen Sat',
      'ph': 'pH',
      'salinity': 'Salinity',
      'electricalConductivity': 'Conductivity',
      'temperature': 'Temperature',
      'tds': 'TDS',
      'turbidity': 'Turbidity',
      'tss': 'TSS',
      'batteryVoltage': 'Battery',
    };
    final allLimits = await _dbHelper.loadMarineParameterLimits() ?? [];
    if (allLimits.isEmpty) return "";

    // Station records may carry either key depending on origin.
    final dynamic stationId =
        data.selectedStation?['station_id'] ?? data.selectedStation?['man_station_id'];
    if (stationId == null) return ""; // Cannot check limits.

    final readings = {
      'oxygenConcentration': data.oxygenConcentration,
      'oxygenSaturation': data.oxygenSaturation,
      'ph': data.ph,
      'salinity': data.salinity,
      'electricalConductivity': data.electricalConductivity,
      'temperature': data.temperature,
      'tds': data.tds,
      'turbidity': data.turbidity,
      'tss': data.tss,
      'batteryVoltage': data.batteryVoltage,
    };
    final List<String> outOfBoundsMessages = [];

    // Limits may arrive as num or String depending on the DB driver.
    double? parseLimitValue(dynamic value) {
      if (value == null) return null;
      if (value is num) return value.toDouble();
      if (value is String) return double.tryParse(value);
      return null;
    }

    readings.forEach((key, value) {
      // -999.0 is the sentinel for "no reading".
      if (value == null || value == -999.0) return;
      final limitName = parameterKeyToLimitName[key];
      if (limitName == null) return;
      // String-based ID comparison tolerates int-vs-String mismatches.
      final limitData = allLimits.firstWhere(
        (l) =>
            l['param_parameter_list'] == limitName &&
            (l['station_id']?.toString() == stationId.toString() ||
                l['man_station_id']?.toString() == stationId.toString()),
        orElse: () => {},
      );
      if (limitData.isNotEmpty) {
        final lowerLimit = parseLimitValue(limitData['param_lower_limit']);
        final upperLimit = parseLimitValue(limitData['param_upper_limit']);
        if ((lowerLimit != null && value < lowerLimit) ||
            (upperLimit != null && value > upperLimit)) {
          final valueStr = value.toStringAsFixed(5);
          final lowerStr = lowerLimit?.toStringAsFixed(5) ?? 'N/A';
          final upperStr = upperLimit?.toStringAsFixed(5) ?? 'N/A';
          outOfBoundsMessages.add(
              '- *$limitName*: `$valueStr` (Station Limit: `$lowerStr` - `$upperStr`)');
        }
      }
    });

    if (outOfBoundsMessages.isEmpty) {
      return "";
    }
    final buffer = StringBuffer()
      ..writeln()
      ..writeln('⚠️ *Station Parameter Limit Alert:*')
      ..writeln(
          'The following parameters were outside their defined station limits:');
    buffer.writeAll(outOfBoundsMessages, '\n');
    return buffer.toString();
  }

  /// Builds the NPE parameter alert section, or "" when nothing triggers.
  /// Note: unlike station limits, a reading INSIDE the NPE range is a hit.
  Future<String> _getNpeAlertSection(MarineInvesManualSamplingData data) async {
    const Map<String, String> parameterKeyToLimitName = {
      'oxygenConcentration': 'Oxygen Conc',
      'oxygenSaturation': 'Oxygen Sat',
      'ph': 'pH',
      'salinity': 'Salinity',
      'electricalConductivity': 'Conductivity',
      'temperature': 'Temperature',
      'tds': 'TDS',
      'turbidity': 'Turbidity',
      'tss': 'TSS',
    };
    final npeLimits = await _dbHelper.loadNpeParameterLimits() ?? [];
    if (npeLimits.isEmpty) return "";

    final readings = {
      'oxygenConcentration': data.oxygenConcentration,
      'oxygenSaturation': data.oxygenSaturation,
      'ph': data.ph,
      'salinity': data.salinity,
      'electricalConductivity': data.electricalConductivity,
      'temperature': data.temperature,
      'tds': data.tds,
      'turbidity': data.turbidity,
      'tss': data.tss,
    };
    final List<String> npeMessages = [];

    double? parseLimitValue(dynamic value) {
      if (value == null) return null;
      if (value is num) return value.toDouble();
      if (value is String) return double.tryParse(value);
      return null;
    }

    readings.forEach((key, value) {
      if (value == null || value == -999.0) return; // -999 = no reading.
      final limitName = parameterKeyToLimitName[key];
      if (limitName == null) return;
      final limitData = npeLimits.firstWhere(
        (l) => l['param_parameter_list'] == limitName,
        orElse: () => {},
      );
      if (limitData.isNotEmpty) {
        final lowerLimit = parseLimitValue(limitData['param_lower_limit']);
        final upperLimit = parseLimitValue(limitData['param_upper_limit']);
        // A hit means the reading falls within the configured NPE window
        // (both bounds), or past the single configured bound.
        bool isHit = false;
        if (lowerLimit != null && upperLimit != null) {
          if (value >= lowerLimit && value <= upperLimit) isHit = true;
        } else if (lowerLimit != null && upperLimit == null) {
          if (value >= lowerLimit) isHit = true;
        } else if (upperLimit != null && lowerLimit == null) {
          if (value <= upperLimit) isHit = true;
        }
        if (isHit) {
          final valueStr = value.toStringAsFixed(5);
          final lowerStr = lowerLimit?.toStringAsFixed(5) ?? 'N/A';
          final upperStr = upperLimit?.toStringAsFixed(5) ?? 'N/A';
          String limitStr;
          if (lowerStr != 'N/A' && upperStr != 'N/A') {
            limitStr = '$lowerStr - $upperStr';
          } else if (lowerStr != 'N/A') {
            limitStr = '>= $lowerStr';
          } else {
            limitStr = '<= $upperStr';
          }
          npeMessages.add('- *$limitName*: `$valueStr` (NPE Limit: `$limitStr`)');
        }
      }
    });

    if (npeMessages.isEmpty) {
      return "";
    }
    final buffer = StringBuffer()
      ..writeln()
      ..writeln(' ')
      ..writeln('🚨 *Marine NPE Parameter Limit Detected:*')
      ..writeln('The following parameters triggered an NPE alert:');
    buffer.writeAll(npeMessages, '\n');
    return buffer.toString();
  }
}