// lib/services/river_investigative_sampling_service.dart

import 'dart:async';
import 'dart:convert';
import 'dart:io';

import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:image_picker/image_picker.dart';
import 'package:path_provider/path_provider.dart';
import 'package:path/path.dart' as p;
import 'package:image/image.dart' as img;
import 'package:geolocator/geolocator.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:flutter_bluetooth_serial/flutter_bluetooth_serial.dart';
import 'package:usb_serial/usb_serial.dart';
import 'package:intl/intl.dart';
import 'package:connectivity_plus/connectivity_plus.dart';
import 'package:provider/provider.dart';

import '../auth_provider.dart';
import 'location_service.dart';
import '../models/river_inves_manual_sampling_data.dart';
import '../bluetooth/bluetooth_manager.dart';
import '../serial/serial_manager.dart';
import 'api_service.dart';
import 'package:environment_monitoring_app/services/database_helper.dart';
import 'local_storage_service.dart';
import 'server_config_service.dart';
import 'zipping_service.dart';
import 'submission_api_service.dart';
import 'submission_ftp_service.dart';
import 'telegram_service.dart';
import 'retry_service.dart';
import 'base_api_service.dart';
import 'user_preferences_service.dart'; // ADDED

/// Coordinates the River Investigative sampling workflow:
///
/// * picking and watermarking station photos,
/// * connecting to the sonde over Bluetooth or USB serial,
/// * submitting form data + images to the API and zipped payloads to FTP,
/// * offline queuing with automatic retry, local/central logging, and
///   Telegram success alerts.
class RiverInvestigativeSamplingService {
  final LocationService _locationService = LocationService();
  final BluetoothManager _bluetoothManager = BluetoothManager();
  final SerialManager _serialManager = SerialManager();
  final SubmissionApiService _submissionApiService = SubmissionApiService();
  final SubmissionFtpService _submissionFtpService = SubmissionFtpService();
  final DatabaseHelper _dbHelper = DatabaseHelper();
  final LocalStorageService _localStorageService = LocalStorageService();
  final ServerConfigService _serverConfigService = ServerConfigService();
  final ZippingService _zippingService = ZippingService();
  final RetryService _retryService = RetryService();
  final TelegramService _telegramService;
  final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED
  final ImagePicker _picker = ImagePicker();

  /// Native channel used to ask the Android side for USB device permission.
  static const platform = MethodChannel('com.example.environment_monitoring_app/usb');

  RiverInvestigativeSamplingService(this._telegramService);

  // --- Location helpers ------------------------------------------------------

  Future getCurrentLocation() => _locationService.getCurrentLocation();

  double calculateDistance(double lat1, double lon1, double lat2, double lon2) =>
      _locationService.calculateDistance(lat1, lon1, lat2, lon2);

  // --- Image capture ---------------------------------------------------------

  /// Picks an image from [source], rejects portrait shots, stamps the
  /// sampling date/time over a white backdrop in the top-left corner, and
  /// saves the result to a temp file named
  /// `<stationCode>_<date>-<time>_<imageInfo>.jpg`.
  ///
  /// Returns `null` when the user cancels, the image cannot be decoded, the
  /// orientation is portrait, or any error occurs.
  Future<File?> pickAndProcessImage(ImageSource source,
      {required RiverInvesManualSamplingData data,
      required String imageInfo,
      bool isRequired = false,
      String? stationCode}) async {
    try {
      final XFile? pickedFile = await _picker.pickImage(
        source: source,
        imageQuality: 85,
        maxWidth: 1024,
      );
      if (pickedFile == null) {
        return null;
      }
      final bytes = await pickedFile.readAsBytes();
      img.Image? originalImage = img.decodeImage(bytes);
      if (originalImage == null) {
        return null;
      }
      // Only landscape photos are accepted.
      if (originalImage.height > originalImage.width) {
        debugPrint("Image rejected: Must be in landscape orientation.");
        return null;
      }
      final String watermarkTimestamp = "${data.samplingDate} ${data.samplingTime}";
      final font = img.arial24;
      // Rough width estimate (~12 px per character) for the white backdrop.
      final textWidth = watermarkTimestamp.length * 12;
      img.fillRect(originalImage,
          x1: 5, y1: 5, x2: textWidth + 15, y2: 35, color: img.ColorRgb8(255, 255, 255));
      img.drawString(originalImage, watermarkTimestamp,
          font: font, x: 10, y: 10, color: img.ColorRgb8(0, 0, 0));
      final tempDir = await getTemporaryDirectory();
      final finalStationCode = stationCode ?? 'NA';
      final fileTimestamp =
          "${data.samplingDate}-${data.samplingTime}".replaceAll(':', '-');
      final newFileName =
          "${finalStationCode}_${fileTimestamp}_${imageInfo.replaceAll(' ', '')}.jpg";
      final filePath = p.join(tempDir.path, newFileName);
      // FIX: write asynchronously instead of blocking the UI isolate with
      // writeAsBytesSync.
      final outputFile = File(filePath);
      await outputFile.writeAsBytes(img.encodeJpg(originalImage));
      return outputFile;
    } catch (e) {
      debugPrint('Error in pickAndProcessImage (River Investigative): $e');
      return null;
    }
  }

  // --- Sonde connectivity (Bluetooth / USB serial) ---------------------------

  ValueNotifier get bluetoothConnectionState => _bluetoothManager.connectionState;
  ValueNotifier get serialConnectionState => _serialManager.connectionState;

  /// Prefers the Bluetooth sonde ID whenever the Bluetooth link is not fully
  /// disconnected; otherwise falls back to the serial sonde ID.
  ValueNotifier get sondeId {
    if (_bluetoothManager.connectionState.value != BluetoothConnectionState.disconnected) {
      return _bluetoothManager.sondeId;
    }
    return _serialManager.sondeId;
  }

  // NOTE(review): generic type arguments were lost from this file; the
  // streams appear to carry per-parameter sensor readings — confirm the
  // exact element type against BluetoothManager/SerialManager.
  Stream<Map<String, dynamic>> get bluetoothDataStream => _bluetoothManager.dataStream;
  Stream<Map<String, dynamic>> get serialDataStream => _serialManager.dataStream;

  String? get connectedBluetoothDeviceName => _bluetoothManager.connectedDeviceName.value;
  String? get connectedSerialDeviceName => _serialManager.connectedDeviceName.value;

  /// Requests the BLE-scan, BLE-connect, and while-in-use location runtime
  /// permissions. Returns `true` only when all three are granted.
  Future<bool> requestDevicePermissions() async {
    final Map<Permission, PermissionStatus> statuses = await [
      Permission.bluetoothScan,
      Permission.bluetoothConnect,
      Permission.locationWhenInUse,
    ].request();
    return statuses[Permission.bluetoothScan] == PermissionStatus.granted &&
        statuses[Permission.bluetoothConnect] == PermissionStatus.granted &&
        statuses[Permission.locationWhenInUse] == PermissionStatus.granted;
  }

  Future<List<BluetoothDevice>> getPairedBluetoothDevices() =>
      _bluetoothManager.getPairedDevices();

  Future connectToBluetoothDevice(BluetoothDevice device) => _bluetoothManager.connect(device);

  void disconnectFromBluetooth() => _bluetoothManager.disconnect();

  void startBluetoothAutoReading({Duration? interval}) =>
      _bluetoothManager.startAutoReading(interval: interval ?? const Duration(seconds: 2));

  void stopBluetoothAutoReading() => _bluetoothManager.stopAutoReading();

  Future<List<UsbDevice>> getAvailableSerialDevices() => _serialManager.getAvailableDevices();

  /// Asks the native side for USB access to [device].
  /// Returns `false` on denial or platform error.
  Future<bool> requestUsbPermission(UsbDevice device) async {
    try {
      return await platform.invokeMethod(
              'requestUsbPermission', {'vid': device.vid, 'pid': device.pid}) ??
          false;
    } on PlatformException catch (e) {
      debugPrint("Failed to request USB permission: '${e.message}'.");
      return false;
    }
  }

  /// Connects to a USB serial [device], requesting permission first.
  /// Throws when the user denies the USB permission.
  Future<void> connectToSerialDevice(UsbDevice device) async {
    final bool permissionGranted = await requestUsbPermission(device);
    if (permissionGranted) {
      await _serialManager.connect(device);
    } else {
      throw Exception("USB permission was not granted.");
    }
  }

  void disconnectFromSerial() => _serialManager.disconnect();

  void startSerialAutoReading({Duration? interval}) =>
      _serialManager.startAutoReading(interval: interval ?? const Duration(seconds: 2));

  void stopSerialAutoReading() => _serialManager.stopAutoReading();

  void dispose() {
    _bluetoothManager.dispose();
    _serialManager.dispose();
  }

  // --- Submission pipeline ---------------------------------------------------

  /// Builds a stable report ID (epoch milliseconds) from the sampling
  /// date/time strings; falls back to "now" when they cannot be parsed.
  String _generateTimestampId(String? date, String? time) {
    final String dateTimeString = "${date ?? ''} ${time ?? ''}";
    try {
      final DateTime samplingDateTime = DateFormat('yyyy-MM-dd HH:mm').parse(dateTimeString);
      return samplingDateTime.millisecondsSinceEpoch.toString();
    } catch (e) {
      debugPrint("Could not parse '$dateTimeString' for timestamp ID, using current time. Error: $e");
      return DateTime.now().millisecondsSinceEpoch.toString();
    }
  }

  /// Entry point for submitting a completed sampling form.
  ///
  /// Picks between the direct online pipeline and offline queuing based on
  /// connectivity and whether the auth session is an offline one (an
  /// "offline-session-" token). When online with an offline session it first
  /// attempts an automatic re-login; failure downgrades to offline queuing.
  Future<Map<String, dynamic>> submitData({
    required RiverInvesManualSamplingData data,
    required List<Map<String, dynamic>>? appSettings,
    required AuthProvider authProvider,
    String? logDirectory,
  }) async {
    const String moduleName = 'river_investigative';
    final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime);
    data.reportId = timestampId;
    final connectivityResult = await Connectivity().checkConnectivity();
    bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
    bool isOfflineSession = authProvider.isLoggedIn &&
        (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false);
    if (isOnline && isOfflineSession) {
      debugPrint("River Investigative submission online during offline session. Attempting auto-relogin...");
      try {
        final bool transitionSuccess = await authProvider.checkAndTransitionToOnlineSession();
        if (transitionSuccess) {
          isOfflineSession = false;
        } else {
          // Could not upgrade the session; treat as offline so data is queued.
          isOnline = false;
        }
      } on SessionExpiredException catch (_) {
        debugPrint("Session expired during auto-relogin check. Treating as offline.");
        isOnline = false;
      }
    }
    if (isOnline && !isOfflineSession) {
      debugPrint("Proceeding with direct ONLINE River Investigative submission...");
      return await _performOnlineSubmission(
        data: data,
        appSettings: appSettings,
        moduleName: moduleName,
        authProvider: authProvider,
        logDirectory: logDirectory,
      );
    } else {
      debugPrint("Proceeding with OFFLINE River Investigative queuing mechanism...");
      return await _performOfflineQueuing(
        data: data,
        moduleName: moduleName,
        logDirectory: logDirectory,
      );
    }
  }

  /// Runs the online pipeline: API form data, API images, FTP archives,
  /// final status resolution ('S4'/'S3'/'L4'/'L1'), local + central logging,
  /// and the Telegram success alert.
  Future<Map<String, dynamic>> _performOnlineSubmission({
    required RiverInvesManualSamplingData data,
    required List<Map<String, dynamic>>? appSettings,
    required String moduleName,
    required AuthProvider authProvider,
    String? logDirectory,
  }) async {
    final serverName =
        (await _serverConfigService.getActiveApiConfig())?['config_name'] as String? ?? 'Default';
    final imageFilesWithNulls = data.toApiImageFiles();
    imageFilesWithNulls.removeWhere((key, value) => value == null);
    final Map<String, File> finalImageFiles = imageFilesWithNulls.cast<String, File>();
    bool anyApiSuccess = false;
    Map<String, dynamic> apiDataResult = {};
    Map<String, dynamic> apiImageResult = {};
    String finalMessage = '';
    String finalStatus = '';
    bool isSessionKnownToBeExpired = false;
    String? apiRecordId;

    // 1. Check module preferences for API
    final pref = await _userPreferencesService.getModulePreference(moduleName);
    bool isApiEnabled = pref?['is_api_enabled'] ?? true;
    bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true;

    if (isApiEnabled) {
      try {
        // 1. Submit Form Data
        apiDataResult = await _submissionApiService.submitPost(
          moduleName: moduleName,
          endpoint: 'river/investigative/sample',
          body: data.toApiFormData(),
        );
        if (apiDataResult['success'] == true) {
          anyApiSuccess = true;
          apiRecordId = apiDataResult['data']?['r_inves_id']?.toString();
          if (apiRecordId != null) {
            if (finalImageFiles.isNotEmpty) {
              // 2. Submit Images
              apiImageResult = await _submissionApiService.submitMultipart(
                moduleName: moduleName,
                endpoint: 'river/investigative/images',
                fields: {'r_inves_id': apiRecordId},
                files: finalImageFiles,
              );
              if (apiImageResult['success'] != true) {
                anyApiSuccess = false;
              }
            }
          } else {
            // The data call "succeeded" but returned no record ID, so the
            // images could never be linked — treat the whole API leg as failed.
            anyApiSuccess = false;
            apiDataResult['success'] = false;
            apiDataResult['message'] =
                'API Error: Submission succeeded but did not return a server record ID.';
          }
        }
      } on SessionExpiredException catch (_) {
        debugPrint("Online River Investigative submission failed due to session expiry that could not be refreshed.");
        isSessionKnownToBeExpired = true;
        anyApiSuccess = false;
        apiDataResult = {
          'success': false,
          'message': 'Session expired and re-login failed. API submission queued.'
        };
        // Manually queue API calls
        await _retryService.addApiToQueue(
            endpoint: 'river/investigative/sample', method: 'POST', body: data.toApiFormData());
        if (finalImageFiles.isNotEmpty && apiRecordId != null) {
          await _retryService.addApiToQueue(
              endpoint: 'river/investigative/images',
              method: 'POST_MULTIPART',
              fields: {'r_inves_id': apiRecordId},
              files: finalImageFiles);
        } else if (finalImageFiles.isNotEmpty && apiRecordId == null) {
          debugPrint("Queueing investigative images without report ID due to session expiry during data submission.");
          await _retryService.addApiToQueue(
              endpoint: 'river/investigative/images',
              method: 'POST_MULTIPART',
              fields: {},
              files: finalImageFiles);
        }
      }
    } else {
      debugPrint("API submission disabled for $moduleName by user preference.");
      apiDataResult = {'success': true, 'message': 'API submission disabled by user.'};
      anyApiSuccess = true; // Treated as success since it was intentional
    }

    // 3. Submit FTP Files
    Map<String, dynamic> ftpResults = {'statuses': []};
    bool anyFtpSuccess = false;
    // --- START FIX: Check if FTP is enabled AND if it was already successful ---
    // 'L4' status means API Failed but FTP Succeeded. If re-submitting an L4 record, we skip FTP.
    // 'S4' means everything succeeded.
    bool previousFtpSuccess = data.submissionStatus == 'L4' || data.submissionStatus == 'S4';
    if (!isFtpEnabled) {
      debugPrint("FTP submission disabled for $moduleName by user preference. Skipping FTP.");
      ftpResults = {
        'statuses': [
          {'status': 'Skipped', 'message': 'FTP disabled by user preference.', 'success': true}
        ]
      };
      anyFtpSuccess = true;
    } else if (previousFtpSuccess) {
      debugPrint("FTP submission skipped because it was already successful (Status: ${data.submissionStatus}).");
      ftpResults = {
        'statuses': [
          {'status': 'Skipped', 'message': 'Already successful in previous attempt.', 'success': true}
        ]
      };
      anyFtpSuccess = true;
    } else {
      // Proceed with FTP logic only if enabled AND not previously successful
      if (isSessionKnownToBeExpired) {
        debugPrint("Skipping FTP attempt for River Investigative due to known expired session. Manually queuing FTP tasks.");
        final baseFileNameForQueue = _generateBaseFileName(data);
        final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
        final dataZip = await _zippingService.createDataZip(
          jsonDataMap: {
            'db.json': data.toDbJson(),
            'river_inves_basic_form.json': data.toBasicFormJson(),
            'river_inves_reading.json': data.toReadingJson(),
            'river_inves_manual_info.json': data.toManualInfoJson(),
          },
          baseFileName: baseFileNameForQueue,
          destinationDir: null,
        );
        if (dataZip != null) {
          for (final config in ftpConfigs) {
            final configId = config['ftp_config_id'];
            if (configId != null) {
              await _retryService.addFtpToQueue(
                  localFilePath: dataZip.path,
                  remotePath: '/${p.basename(dataZip.path)}',
                  ftpConfigId: configId);
            }
          }
        }
        if (finalImageFiles.isNotEmpty) {
          final Map<String, File> retryImages = {};
          final String dateStr = (data.samplingDate ?? '').replaceAll('-', '');
          final String timeStr = (data.samplingTime ?? '').replaceAll(':', '');
          final String zipImageTimestamp = "$dateStr$timeStr";
          // FIX: also skip files that no longer exist on disk, matching the
          // direct-upload path, so the retry zip cannot fail on a missing
          // temp file.
          void addRetryMap(File? file, String prefix) {
            if (file != null && file.existsSync()) {
              retryImages['${prefix}_$zipImageTimestamp.jpg'] = file;
            }
          }

          addRetryMap(data.backgroundStationImage, 'background');
          addRetryMap(data.upstreamRiverImage, 'upstream');
          addRetryMap(data.downstreamRiverImage, 'downstream');
          // FIX: was 'sample_turbidity', which did not match the 'turbidity'
          // prefix used by the direct FTP path (_generateAndUploadFtpFiles),
          // so retried uploads arrived under a different name.
          addRetryMap(data.sampleTurbidityImage, 'turbidity');
          addRetryMap(data.optionalImage1, 'optional_1');
          addRetryMap(data.optionalImage2, 'optional_2');
          addRetryMap(data.optionalImage3, 'optional_3');
          addRetryMap(data.optionalImage4, 'optional_4');
          final retryImageZip = await _zippingService.createRenamedImageZip(
            imageFiles: retryImages,
            baseFileName: baseFileNameForQueue,
            destinationDir: null,
          );
          if (retryImageZip != null) {
            for (final config in ftpConfigs) {
              final configId = config['ftp_config_id'];
              if (configId != null) {
                await _retryService.addFtpToQueue(
                    localFilePath: retryImageZip.path,
                    remotePath: '/${p.basename(retryImageZip.path)}',
                    ftpConfigId: configId);
              }
            }
          }
        }
        ftpResults = {
          'statuses': [
            {'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}
          ]
        };
        anyFtpSuccess = false;
      } else {
        try {
          ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName);
          // Success unless some *configured* destination actually failed.
          anyFtpSuccess = !(ftpResults['statuses'] as List).any(
              (status) => status['success'] == false && status['status'] != 'Not Configured');
        } catch (e) {
          debugPrint("Unexpected River Investigative FTP submission error: $e");
          anyFtpSuccess = false;
          ftpResults = {
            'statuses': [
              {'status': 'Error', 'message': 'FTP process failed: $e.', 'success': false}
            ]
          };
        }
      }
    }
    // --- END FIX ---

    // 4. Determine Final Status
    final bool overallSuccess = anyApiSuccess || anyFtpSuccess;
    if (anyApiSuccess && anyFtpSuccess) {
      finalMessage = 'Data submitted successfully to all destinations.';
      finalStatus = 'S4';
    } else if (anyApiSuccess && !anyFtpSuccess) {
      finalMessage = 'Data sent to API, but some FTP uploads failed or were queued.';
      finalStatus = 'S3';
    } else if (!anyApiSuccess && anyFtpSuccess) {
      finalMessage = 'API submission failed and was queued, but files were sent to FTP successfully.';
      finalStatus = 'L4';
    } else {
      finalMessage = apiDataResult['message'] ?? 'All submission attempts failed and have been queued for retry.';
      finalStatus = 'L1';
    }

    // 5. Log Locally
    await _logAndSave(
      data: data,
      status: finalStatus,
      message: finalMessage,
      apiResults: [apiDataResult, apiImageResult].where((r) => r.isNotEmpty).toList(),
      ftpStatuses: ftpResults['statuses'] ?? [],
      serverName: serverName,
      apiRecordId: apiRecordId,
      logDirectory: logDirectory,
    );

    // 6. Send Alert
    if (overallSuccess) {
      _handleSuccessAlert(data, appSettings,
          isDataOnly: finalImageFiles.isEmpty, isSessionExpired: isSessionKnownToBeExpired);
    }
    return {
      'status': finalStatus,
      'success': overallSuccess,
      'message': finalMessage,
      'reportId': data.reportId
    };
  }

  /// Saves the submission locally and enqueues a retry task; used when the
  /// device is offline or the session could not be upgraded to online.
  Future<Map<String, dynamic>> _performOfflineQueuing({
    required RiverInvesManualSamplingData data,
    required String moduleName,
    String? logDirectory,
  }) async {
    final serverConfig = await _serverConfigService.getActiveApiConfig();
    final serverName = serverConfig?['config_name'] as String? ?? 'Default';
    data.submissionStatus = 'Queued';
    data.submissionMessage = 'Submission queued for later retry.';
    String? savedLogPath = logDirectory;
    if (savedLogPath != null && savedLogPath.isNotEmpty) {
      await _localStorageService
          .updateRiverInvestigativeLog(data.toMap()..['logDirectory'] = savedLogPath);
      debugPrint("Updated existing River Investigative log for queuing: $savedLogPath");
    } else {
      savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data,
          serverName: serverName);
      debugPrint("Saved new River Investigative log for queuing: $savedLogPath");
    }
    if (savedLogPath == null) {
      const message = "Failed to save River Investigative submission to local device storage.";
      await _logAndSave(
          data: data,
          status: 'Error',
          message: message,
          apiResults: [],
          ftpStatuses: [],
          serverName: serverName,
          apiRecordId: null,
          logDirectory: logDirectory);
      return {'status': 'Error', 'success': false, 'message': message};
    }
    await _retryService.queueTask(
      type: 'river_investigative_submission',
      payload: {
        'module': moduleName,
        'localLogPath': p.join(savedLogPath, 'data.json'),
        'serverConfig': serverConfig,
      },
    );
    const successMessage =
        "Device offline. River Investigative submission has been saved locally and queued for automatic retry when connection is restored.";
    data.submissionStatus = 'L1';
    data.submissionMessage = successMessage;
    await _logAndSave(
        data: data,
        status: 'L1',
        message: successMessage,
        apiResults: [],
        ftpStatuses: [],
        serverName: serverName,
        apiRecordId: null,
        logDirectory: savedLogPath);
    return {
      'status': 'Queued',
      'success': true,
      'message': successMessage,
      'reportId': data.reportId
    };
  }

  /// Returns the "<stationCode>_<reportId>" base name shared by every
  /// generated archive and log folder for this submission.
  String _generateBaseFileName(RiverInvesManualSamplingData data) {
    final stationCode = data.getDeterminedStationCode() ?? 'UNKNOWN';
    if (data.reportId == null || data.reportId!.isEmpty) {
      debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
      return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
    }
    return "${stationCode}_${data.reportId}";
  }

  /// Zips the form JSON payloads and renamed images next to the local log
  /// directory and uploads both archives via FTP. Returns the combined
  /// per-destination status list under 'statuses'.
  Future<Map<String, dynamic>> _generateAndUploadFtpFiles(RiverInvesManualSamplingData data,
      Map<String, File> imageFiles, String serverName, String moduleName) async {
    final String dateStr = (data.samplingDate ?? '').replaceAll('-', '');
    final String timeStr = (data.samplingTime ?? '').replaceAll(':', '');
    final String zipImageTimestamp = "$dateStr$timeStr";
    final baseFileName = _generateBaseFileName(data);
    final Directory? logDirectory =
        await _localStorageService.getRiverInvestigativeBaseDir(serverName: serverName);
    final Directory? localSubmissionDir =
        logDirectory != null ? Directory(p.join(logDirectory.path, baseFileName)) : null;
    if (localSubmissionDir != null && !await localSubmissionDir.exists()) {
      await localSubmissionDir.create(recursive: true);
    }
    final dataZip = await _zippingService.createDataZip(
      jsonDataMap: {
        'db.json': data.toDbJson(),
        'river_inves_basic_form.json': data.toBasicFormJson(),
        'river_inves_reading.json': data.toReadingJson(),
        'river_inves_manual_info.json': data.toManualInfoJson(),
      },
      baseFileName: baseFileName,
      destinationDir: localSubmissionDir,
    );
    Map<String, dynamic> ftpDataResult = {'success': true, 'statuses': []};
    if (dataZip != null) {
      ftpDataResult = await _submissionFtpService.submit(
          moduleName: moduleName,
          fileToUpload: dataZip,
          remotePath: '/${p.basename(dataZip.path)}');
    }
    Map<String, dynamic> ftpImageResult = {'success': true, 'statuses': []};
    final Map<String, File> imagesForZip = {};
    // Only bundle images that still exist on disk.
    void mapImage(File? file, String prefix) {
      if (file != null && file.existsSync()) {
        imagesForZip['${prefix}_$zipImageTimestamp.jpg'] = file;
      }
    }

    mapImage(data.backgroundStationImage, 'background');
    mapImage(data.upstreamRiverImage, 'upstream');
    mapImage(data.downstreamRiverImage, 'downstream');
    mapImage(data.sampleTurbidityImage, 'turbidity');
    mapImage(data.optionalImage1, 'optional_1');
    mapImage(data.optionalImage2, 'optional_2');
    mapImage(data.optionalImage3, 'optional_3');
    mapImage(data.optionalImage4, 'optional_4');
    if (imagesForZip.isNotEmpty) {
      final imageZip = await _zippingService.createRenamedImageZip(
        imageFiles: imagesForZip,
        baseFileName: baseFileName,
        destinationDir: localSubmissionDir,
      );
      if (imageZip != null) {
        ftpImageResult = await _submissionFtpService.submit(
            moduleName: moduleName,
            fileToUpload: imageZip,
            remotePath: '/${p.basename(imageZip.path)}');
      }
    }
    return {
      'statuses': <Map<String, dynamic>>[
        ...(ftpDataResult['statuses'] as List? ?? []),
        ...(ftpImageResult['statuses'] as List? ?? []),
      ],
    };
  }

  /// Persists the submission outcome both to the per-submission folder log
  /// (via LocalStorageService) and to the central submissions table in the DB.
  Future<void> _logAndSave({
    required RiverInvesManualSamplingData data,
    required String status,
    required String message,
    required List<Map<String, dynamic>> apiResults,
    required List<Map<String, dynamic>> ftpStatuses,
    required String serverName,
    String? apiRecordId,
    String? logDirectory,
  }) async {
    data.submissionStatus = status;
    data.submissionMessage = message;
    final baseFileName = _generateBaseFileName(data);
    final Map<String, dynamic> logMapData = data.toMap();
    logMapData['submissionStatus'] = status;
    logMapData['submissionMessage'] = message;
    logMapData['apiRecordId'] = apiRecordId;
    logMapData['serverConfigName'] = serverName;
    logMapData['api_status'] = jsonEncode(apiResults);
    logMapData['ftp_status'] = jsonEncode(ftpStatuses);
    String? savedLogPath = logDirectory;
    if (savedLogPath != null && savedLogPath.isNotEmpty) {
      logMapData['logDirectory'] = savedLogPath;
      await _localStorageService.updateRiverInvestigativeLog(logMapData);
    } else {
      savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data,
          serverName: serverName);
      if (savedLogPath != null) {
        logMapData['logDirectory'] = savedLogPath;
      } else {
        debugPrint("Failed to save River Investigative log locally, central DB log might be incomplete.");
      }
    }
    final imagePaths =
        data.toApiImageFiles().values.whereType<File>().map((f) => f.path).toList();
    final centralLogData = {
      'submission_id': data.reportId ?? baseFileName,
      'module': 'river',
      'type': 'Investigative',
      'status': status,
      'message': message,
      'report_id': apiRecordId,
      'created_at': DateTime.now().toIso8601String(),
      'form_data': jsonEncode(logMapData),
      'image_data': jsonEncode(imagePaths),
      'server_name': serverName,
      'api_status': jsonEncode(apiResults),
      'ftp_status': jsonEncode(ftpStatuses),
    };
    try {
      await _dbHelper.saveSubmissionLog(centralLogData);
    } catch (e) {
      debugPrint("Error saving River Investigative submission log to DB: $e");
    }
  }

  /// Sends (or queues) the Telegram success alert. With a known-expired
  /// session the message is queued directly instead of attempting a send;
  /// otherwise a failed immediate send falls back to the queue.
  Future<void> _handleSuccessAlert(
      RiverInvesManualSamplingData data, List<Map<String, dynamic>>? appSettings,
      {required bool isDataOnly, bool isSessionExpired = false}) async {
    try {
      final message = await _generateSuccessAlertMessage(data, isDataOnly: isDataOnly);
      const alertKey = 'river_investigative';
      if (isSessionExpired) {
        debugPrint("Session is expired; queuing River Investigative Telegram alert directly for $alertKey.");
        await _telegramService.queueMessage(alertKey, message, appSettings);
      } else {
        final bool wasSent =
            await _telegramService.sendAlertImmediately(alertKey, message, appSettings);
        if (!wasSent) {
          await _telegramService.queueMessage(alertKey, message, appSettings);
        }
      }
    } catch (e) {
      debugPrint("Failed to handle River Investigative Telegram alert: $e");
    }
  }

  /// Builds the Markdown-formatted Telegram alert body, with optional
  /// distance and out-of-bounds parameter sections.
  Future<String> _generateSuccessAlertMessage(RiverInvesManualSamplingData data,
      {required bool isDataOnly}) async {
    final submissionType = isDataOnly ? "(Data Only)" : "(Data & Images)";
    final stationName =
        data.getDeterminedRiverName() ?? data.getDeterminedStationName() ?? 'N/A';
    final stationCode = data.getDeterminedStationCode() ?? 'N/A';
    final submissionDate = data.samplingDate ?? DateFormat('yyyy-MM-dd').format(DateTime.now());
    final submitter = data.firstSamplerName ?? 'N/A';
    final sondeID = data.sondeId ?? 'N/A';
    final distanceKm = data.distanceDifferenceInKm ?? 0;
    final distanceMeters = (distanceKm * 1000).toStringAsFixed(0);
    final distanceRemarks = data.distanceDifferenceRemarks ?? '';
    final buffer = StringBuffer()
      ..writeln('✅ *River Investigative Sample ${submissionType} Submitted:*')
      ..writeln();
    buffer.writeln('*Station Type:* ${data.stationTypeSelection ?? 'N/A'}');
    if (data.stationTypeSelection == 'New Location') {
      buffer.writeln('*New Location Name:* ${data.newStationName ?? 'N/A'}');
      buffer.writeln('*New Location Code:* ${data.newStationCode ?? 'N/A'}');
      buffer.writeln('*New Location State:* ${data.newStateName ?? 'N/A'}');
      buffer.writeln('*New Location Basin:* ${data.newBasinName ?? 'N/A'}');
      buffer.writeln('*New Location River:* ${data.newRiverName ?? 'N/A'}');
      buffer.writeln(
          '*Coordinates:* ${data.stationLatitude ?? 'N/A'}, ${data.stationLongitude ?? 'N/A'}');
    } else {
      buffer.writeln('*Station Name & Code:* $stationName ($stationCode)');
    }
    buffer
      ..writeln('*Date of Submitted:* $submissionDate')
      ..writeln('*Submitted by User:* $submitter')
      ..writeln('*Sonde ID:* $sondeID')
      ..writeln('*Status of Submission:* Successful');
    // Distance alert only applies to existing stations, and only when the
    // sampler was >50 m from the station or left a remark.
    if (data.stationTypeSelection != 'New Location' &&
        (distanceKm * 1000 > 50 || distanceRemarks.isNotEmpty)) {
      buffer
        ..writeln()
        ..writeln('🔔 *Distance Alert:*')
        ..writeln('*Distance from station:* $distanceMeters meters');
      if (distanceRemarks.isNotEmpty) {
        buffer.writeln('*Remarks for distance:* $distanceRemarks');
      }
    }
    final outOfBoundsAlert = await _getOutOfBoundsAlertSection(data);
    if (outOfBoundsAlert.isNotEmpty) {
      buffer.write(outOfBoundsAlert);
    }
    return buffer.toString();
  }

  /// Returns a "Parameter Limit Alert" section when any sonde reading falls
  /// outside its configured limit, or an empty string otherwise. Readings of
  /// -999.0 are skipped (treated as "no reading").
  Future<String> _getOutOfBoundsAlertSection(RiverInvesManualSamplingData data) async {
    const Map<String, String> parameterKeyToLimitName = {
      'oxygenConcentration': 'Oxygen Conc',
      'oxygenSaturation': 'Oxygen Sat',
      'ph': 'pH',
      'salinity': 'Salinity',
      'electricalConductivity': 'Conductivity',
      'temperature': 'Temperature',
      'tds': 'TDS',
      'turbidity': 'Turbidity',
      'ammonia': 'Ammonia',
      'batteryVoltage': 'Battery',
    };
    final allLimits = await _dbHelper.loadRiverParameterLimits() ?? [];
    if (allLimits.isEmpty) return "";
    final readings = <String, double?>{
      'oxygenConcentration': data.oxygenConcentration,
      'oxygenSaturation': data.oxygenSaturation,
      'ph': data.ph,
      'salinity': data.salinity,
      'electricalConductivity': data.electricalConductivity,
      'temperature': data.temperature,
      'tds': data.tds,
      'turbidity': data.turbidity,
      'ammonia': data.ammonia,
      'batteryVoltage': data.batteryVoltage,
    };
    final List<String> outOfBoundsMessages = [];
    // Limits may be stored as numbers or strings; normalise to double.
    double? parseLimitValue(dynamic value) {
      if (value == null) return null;
      if (value is num) return value.toDouble();
      if (value is String) return double.tryParse(value);
      return null;
    }

    readings.forEach((key, value) {
      if (value == null || value == -999.0) return;
      final limitName = parameterKeyToLimitName[key];
      if (limitName == null) return;
      final limitData = allLimits.firstWhere(
        (l) => l['param_parameter_list'] == limitName,
        orElse: () => {},
      );
      if (limitData.isNotEmpty) {
        final lowerLimit = parseLimitValue(limitData['param_lower_limit']);
        final upperLimit = parseLimitValue(limitData['param_upper_limit']);
        bool isOutOfBounds = false;
        if (lowerLimit != null && value < lowerLimit) isOutOfBounds = true;
        if (upperLimit != null && value > upperLimit) isOutOfBounds = true;
        if (isOutOfBounds) {
          final valueStr = value.toStringAsFixed(5);
          final lowerStr = lowerLimit?.toStringAsFixed(5) ?? 'N/A';
          final upperStr = upperLimit?.toStringAsFixed(5) ?? 'N/A';
          outOfBoundsMessages.add('- *$limitName*: `$valueStr` (Limit: `$lowerStr` - `$upperStr`)');
        }
      }
    });
    if (outOfBoundsMessages.isEmpty) {
      return "";
    }
    final buffer = StringBuffer()
      ..writeln()
      ..writeln('⚠️ *Parameter Limit Alert:*')
      ..writeln('The following parameters were outside their defined limits:');
    buffer.writeAll(outOfBoundsMessages, '\n');
    return buffer.toString();
  }
}