diff --git a/lib/models/river_inves_manual_sampling_data.dart b/lib/models/river_inves_manual_sampling_data.dart index 901aa90..52d9164 100644 --- a/lib/models/river_inves_manual_sampling_data.dart +++ b/lib/models/river_inves_manual_sampling_data.dart @@ -329,69 +329,71 @@ class RiverInvesManualSamplingData { // Sampler & Time Info (Assuming same API keys as manual) add('first_sampler_user_id', firstSamplerUserId); - add('r_inv_second_sampler_id', secondSampler?['user_id']); // Prefixed inv? - add('r_inv_date', samplingDate); - add('r_inv_time', samplingTime); - add('r_inv_type', samplingType); // Should be 'Investigative' - add('r_inv_sample_id_code', sampleIdCode); + // *** FIX: Changed 'r_inv_' to 'r_inves_' to match API *** + add('r_inves_second_sampler_id', secondSampler?['user_id']); + add('r_inves_date', samplingDate); + add('r_inves_time', samplingTime); + add('r_inves_type', samplingType); + add('r_inves_sample_id_code', sampleIdCode); // Station Info (Conditional) - add('r_inv_station_type', stationTypeSelection); + add('r_inves_station_type', stationTypeSelection); if (stationTypeSelection == 'Existing Manual Station') { add('station_id', selectedStation?['station_id']); // Assuming API wants the numeric ID - add('r_inv_station_code', selectedStation?['sampling_station_code']); // Add code for display/logging if needed + add('r_inves_station_code', selectedStation?['sampling_station_code']); // Add code for display/logging if needed } else if (stationTypeSelection == 'Existing Triennial Station') { add('triennial_station_id', selectedTriennialStation?['station_id']); // Assuming a different key - add('r_inv_station_code', selectedTriennialStation?['triennial_station_code']); + add('r_inves_station_code', selectedTriennialStation?['triennial_station_code']); } else if (stationTypeSelection == 'New Location') { - add('r_inv_new_state_name', newStateName); - add('r_inv_new_basin_name', newBasinName); - add('r_inv_new_river_name', newRiverName); - 
add('r_inv_new_station_name', newStationName); // Include newStationName - add('r_inv_new_station_code', newStationCode); // Optional code - add('r_inv_station_latitude', stationLatitude); // Use the captured/entered lat/lon - add('r_inv_station_longitude', stationLongitude); + add('r_inves_new_state_name', newStateName); + add('r_inves_new_basin_name', newBasinName); + add('r_inves_new_river_name', newRiverName); + add('r_inves_new_station_name', newStationName); // Include newStationName + add('r_inves_new_station_code', newStationCode); // Optional code + add('r_inves_station_latitude', stationLatitude); // Use the captured/entered lat/lon + add('r_inves_station_longitude', stationLongitude); } // Location Verification (Assuming same keys) - add('r_inv_current_latitude', currentLatitude); - add('r_inv_current_longitude', currentLongitude); - add('r_inv_distance_difference', distanceDifferenceInKm); - add('r_inv_distance_difference_remarks', distanceDifferenceRemarks); + add('r_inves_current_latitude', currentLatitude); + add('r_inves_current_longitude', currentLongitude); + add('r_inves_distance_difference', distanceDifferenceInKm); + add('r_inves_distance_difference_remarks', distanceDifferenceRemarks); // Site Info (Assuming same keys) - add('r_inv_weather', weather); - add('r_inv_event_remark', eventRemarks); - add('r_inv_lab_remark', labRemarks); + add('r_inves_weather', weather); + add('r_inves_event_remark', eventRemarks); + add('r_inves_lab_remark', labRemarks); // Optional Remarks (Assuming same keys) - add('r_inv_optional_photo_01_remarks', optionalRemark1); - add('r_inv_optional_photo_02_remarks', optionalRemark2); - add('r_inv_optional_photo_03_remarks', optionalRemark3); - add('r_inv_optional_photo_04_remarks', optionalRemark4); + add('r_inves_optional_photo_01_remarks', optionalRemark1); + add('r_inves_optional_photo_02_remarks', optionalRemark2); + add('r_inves_optional_photo_03_remarks', optionalRemark3); + add('r_inves_optional_photo_04_remarks', 
optionalRemark4); // Parameters (Assuming same keys) - add('r_inv_sondeID', sondeId); - add('data_capture_date', dataCaptureDate); // Reuse generic keys? - add('data_capture_time', dataCaptureTime); // Reuse generic keys? - add('r_inv_oxygen_conc', oxygenConcentration); - add('r_inv_oxygen_sat', oxygenSaturation); - add('r_inv_ph', ph); - add('r_inv_salinity', salinity); - add('r_inv_conductivity', electricalConductivity); - add('r_inv_temperature', temperature); - add('r_inv_tds', tds); - add('r_inv_turbidity', turbidity); - add('r_inv_ammonia', ammonia); - add('r_inv_battery_volt', batteryVoltage); + add('r_inves_sondeID', sondeId); + // Note: data_capture_date/time might not be used by API if not in controller, but keeping generally safe + add('data_capture_date', dataCaptureDate); + add('data_capture_time', dataCaptureTime); + add('r_inves_oxygen_conc', oxygenConcentration); + add('r_inves_oxygen_sat', oxygenSaturation); + add('r_inves_ph', ph); + add('r_inves_salinity', salinity); + add('r_inves_conductivity', electricalConductivity); + add('r_inves_temperature', temperature); + add('r_inves_tds', tds); + add('r_inves_turbidity', turbidity); + add('r_inves_ammonia', ammonia); + add('r_inves_battery_volt', batteryVoltage); // Flowrate (Assuming same keys) - add('r_inv_flowrate_method', flowrateMethod); - add('r_inv_flowrate_sd_height', flowrateSurfaceDrifterHeight); - add('r_inv_flowrate_sd_distance', flowrateSurfaceDrifterDistance); - add('r_inv_flowrate_sd_time_first', flowrateSurfaceDrifterTimeFirst); - add('r_inv_flowrate_sd_time_last', flowrateSurfaceDrifterTimeLast); - add('r_inv_flowrate_value', flowrateValue); + add('r_inves_flowrate_method', flowrateMethod); + add('r_inves_flowrate_sd_height', flowrateSurfaceDrifterHeight); + add('r_inves_flowrate_sd_distance', flowrateSurfaceDrifterDistance); + add('r_inves_flowrate_sd_time_first', flowrateSurfaceDrifterTimeFirst); + add('r_inves_flowrate_sd_time_last', flowrateSurfaceDrifterTimeLast); + 
add('r_inves_flowrate_value', flowrateValue); // Additional data that might be useful for display or if API needs it redundantly add('first_sampler_name', firstSamplerName); @@ -407,16 +409,16 @@ class RiverInvesManualSamplingData { /// Converts the image properties into a Map for the multipart API request. /// Keys should match the expected API endpoint fields for Investigative images. Map toApiImageFiles() { - // Assuming same keys as manual, but prefixed with r_inv_? + // *** FIX: Updated keys to 'r_inves_' to match DB/Controller *** return { - 'r_inv_background_station': backgroundStationImage, - 'r_inv_upstream_river': upstreamRiverImage, - 'r_inv_downstream_river': downstreamRiverImage, - 'r_inv_sample_turbidity': sampleTurbidityImage, - 'r_inv_optional_photo_01': optionalImage1, - 'r_inv_optional_photo_02': optionalImage2, - 'r_inv_optional_photo_03': optionalImage3, - 'r_inv_optional_photo_04': optionalImage4, + 'r_inves_background_station': backgroundStationImage, + 'r_inves_upstream_river': upstreamRiverImage, + 'r_inves_downstream_river': downstreamRiverImage, + 'r_inves_sample_turbidity': sampleTurbidityImage, + 'r_inves_optional_photo_01': optionalImage1, + 'r_inves_optional_photo_02': optionalImage2, + 'r_inves_optional_photo_03': optionalImage3, + 'r_inves_optional_photo_04': optionalImage4, }; } diff --git a/lib/screens/river/investigative/manual_sampling/river_inves_in_situ_step_3_data_capture.dart b/lib/screens/river/investigative/manual_sampling/river_inves_in_situ_step_3_data_capture.dart index b48a1ad..a30b8c4 100644 --- a/lib/screens/river/investigative/manual_sampling/river_inves_in_situ_step_3_data_capture.dart +++ b/lib/screens/river/investigative/manual_sampling/river_inves_in_situ_step_3_data_capture.dart @@ -224,8 +224,9 @@ class _RiverInvesStep3DataCaptureState extends State if (value == 'NA') { _flowrateValueController.text = 'NA'; } else if (value == 'Flowmeter') { - // Keep existing value if user switches back, or clear if desired - 
// _flowrateValueController.clear(); + // --- MODIFICATION: Clear flowrate value for Flowmeter --- + _flowrateValueController.clear(); + // --- END MODIFICATION --- _sdHeightController.clear(); _sdDistanceController.clear(); _sdTimeFirstController.clear(); @@ -466,10 +467,13 @@ class _RiverInvesStep3DataCaptureState extends State return; } + // --- START MODIFICATION: Disable Next if Connected --- + // --- MODIFICATION: Changed to allow proceeding if reading is STOPPED, even if connected --- if (_isAutoReading) { _showStopReadingDialog(); return; } + // --- END MODIFICATION --- if (!_formKey.currentState!.validate()) { return; @@ -649,9 +653,10 @@ class _RiverInvesStep3DataCaptureState extends State } Map? _getActiveConnectionDetails() { - // Logic copied from RiverInSituStep3DataCaptureState._getActiveConnectionDetails - // Uses the correct _samplingService instance via context.watch - final service = context.watch(); // Watch Investigative service + // --- START FIX: Use read() instead of watch() --- + final service = context.read(); + // --- END FIX --- + if (service.bluetoothConnectionState.value != BluetoothConnectionState.disconnected) { return {'type': 'bluetooth', 'state': service.bluetoothConnectionState.value, 'name': service.connectedBluetoothDeviceName}; } @@ -675,6 +680,15 @@ class _RiverInvesStep3DataCaptureState extends State final activeConnection = _getActiveConnectionDetails(); final String? activeType = activeConnection?['type'] as String?; + // Check if ANY device is currently connected + final bool isDeviceConnected = activeConnection != null; + + // --- START MODIFICATION: Logic for disabling inputs --- + // Disable interaction if auto-reading is active OR if locked out. + // If reading is stopped (even if connected), we allow interaction. 
+ final bool shouldDisableInput = _isAutoReading || _isLockedOut; + // --- END MODIFICATION --- + return WillPopScope( onWillPop: () async { if (_isLockedOut) { @@ -719,14 +733,18 @@ class _RiverInvesStep3DataCaptureState extends State ValueListenableBuilder( valueListenable: service.sondeId, // Listen to the correct service instance builder: (context, sondeId, child) { - final newSondeId = sondeId ?? ''; - // Use addPostFrameCallback to avoid setting state during build - WidgetsBinding.instance.addPostFrameCallback((_) { - if (mounted && _sondeIdController.text != newSondeId) { - _sondeIdController.text = newSondeId; - widget.data.sondeId = newSondeId; // Update model - } - }); + // --- START FIX: Only update if non-null to prevent clearing on disconnect --- + if (sondeId != null && sondeId.isNotEmpty) { + final newSondeId = sondeId; + // Use addPostFrameCallback to avoid setting state during build + WidgetsBinding.instance.addPostFrameCallback((_) { + if (mounted && _sondeIdController.text != newSondeId) { + _sondeIdController.text = newSondeId; + widget.data.sondeId = newSondeId; // Update model + } + }); + } + // --- END FIX --- return TextFormField( controller: _sondeIdController, decoration: const InputDecoration(labelText: 'Sonde ID *', hintText: 'Connect device or enter manually'), @@ -768,14 +786,20 @@ class _RiverInvesStep3DataCaptureState extends State const Divider(height: 32), // Flowrate Section - _buildFlowrateSection(), + // --- MODIFIED: Pass connection state to Flowrate Section --- + _buildFlowrateSection(isInputDisabled: shouldDisableInput), const SizedBox(height: 32), // Next Button with Lockout Timer ElevatedButton( - onPressed: _isLockedOut ? null : _validateAndProceed, + // Disable if Locked Out OR Auto Reading is active + onPressed: (_isLockedOut || _isAutoReading) ? null : _validateAndProceed, style: ElevatedButton.styleFrom(padding: const EdgeInsets.symmetric(vertical: 16)), - child: Text(_isLockedOut ? 
'Next ($_lockoutSecondsRemaining\s)' : 'Next'), + child: Text( + _isLockedOut + ? 'Next ($_lockoutSecondsRemaining\s)' + : (_isAutoReading ? 'Stop Reading to Proceed' : 'Next') // Helper text + ), ), ], ), @@ -1055,8 +1079,8 @@ class _RiverInvesStep3DataCaptureState extends State ); } - Widget _buildFlowrateSection() { - // Copied from RiverInSituStep3DataCaptureState._buildFlowrateSection, modified to use Wrap + // Updated to include disable logic + Widget _buildFlowrateSection({bool isInputDisabled = false}) { return Card( margin: const EdgeInsets.symmetric(vertical: 4.0), child: Padding( @@ -1065,26 +1089,52 @@ class _RiverInvesStep3DataCaptureState extends State crossAxisAlignment: CrossAxisAlignment.start, children: [ Text("Flowrate", style: Theme.of(context).textTheme.titleLarge), + if (isInputDisabled) + Padding( + padding: const EdgeInsets.only(top: 8.0, bottom: 8.0), + child: Row( + children: [ + Icon(Icons.info_outline, color: Colors.orange, size: 20), + const SizedBox(width: 8), + Expanded( + child: Text( + "Please stop reading to enter flowrate.", + style: TextStyle(color: Colors.orange[800], fontSize: 12), + ), + ), + ], + ), + ), const SizedBox(height: 8), - // --- START FIX: Replaced Row with Wrap to fix horizontal overflow for radio buttons --- - Wrap( - alignment: WrapAlignment.spaceAround, - spacing: 8.0, - runSpacing: 4.0, - children: [ - _buildFlowrateRadioButton("Surface Drifter"), - _buildFlowrateRadioButton("Flowmeter"), - _buildFlowrateRadioButton("NA"), // Not Applicable - ], + // Wrap content in AbsorbPointer and Opacity if connected + AbsorbPointer( + absorbing: isInputDisabled, + child: Opacity( + opacity: isInputDisabled ? 
0.5 : 1.0, + child: Column( + children: [ + // Replaced Row with Wrap to fix horizontal overflow for radio buttons + Wrap( + alignment: WrapAlignment.spaceAround, + spacing: 8.0, + runSpacing: 4.0, + children: [ + _buildFlowrateRadioButton("Surface Drifter"), + _buildFlowrateRadioButton("Flowmeter"), + _buildFlowrateRadioButton("NA"), // Not Applicable + ], + ), + // Conditional fields based on selected method + if (_selectedFlowrateMethod == 'Surface Drifter') + _buildSurfaceDrifterFields(), + if (_selectedFlowrateMethod == 'Flowmeter') + _buildFlowmeterField(), + if (_selectedFlowrateMethod == 'NA') + _buildNAField(), + ], + ), + ), ), - // --- END FIX --- - // Conditional fields based on selected method - if (_selectedFlowrateMethod == 'Surface Drifter') - _buildSurfaceDrifterFields(), - if (_selectedFlowrateMethod == 'Flowmeter') - _buildFlowmeterField(), - if (_selectedFlowrateMethod == 'NA') - _buildNAField(), ], ), ), @@ -1092,7 +1142,6 @@ class _RiverInvesStep3DataCaptureState extends State } Widget _buildFlowrateRadioButton(String title) { - // Copied from RiverInSituStep3DataCaptureState._buildFlowrateRadioButton, added overflow handling return Column( children: [ Radio( @@ -1110,7 +1159,6 @@ class _RiverInvesStep3DataCaptureState extends State } Widget _buildSurfaceDrifterFields() { - // Copied from RiverInSituStep3DataCaptureState._buildSurfaceDrifterFields return Padding( padding: const EdgeInsets.only(top: 16.0), child: Column( @@ -1175,13 +1223,17 @@ class _RiverInvesStep3DataCaptureState extends State } Widget _buildNAField() { - // Copied from RiverInSituStep3DataCaptureState._buildNAField + // Fix: Use controller to set value instead of initialValue to avoid conflict crash + if (_flowrateValueController.text != 'NA') { + _flowrateValueController.text = 'NA'; + } + return Padding( padding: const EdgeInsets.only(top: 16.0), child: TextFormField( controller: _flowrateValueController, decoration: const InputDecoration(labelText: 'Flowrate (m/s)'), - 
initialValue: 'NA', // Set initial value to NA + // initialValue: 'NA', // Removed to fix AssertionError: initialValue == null || controller == null readOnly: true, // Make it read-only ), ); diff --git a/lib/screens/river/manual/triennial/widgets/river_manual_triennial_step_3_data_capture.dart b/lib/screens/river/manual/triennial/widgets/river_manual_triennial_step_3_data_capture.dart index aeb1104..90d618c 100644 --- a/lib/screens/river/manual/triennial/widgets/river_manual_triennial_step_3_data_capture.dart +++ b/lib/screens/river/manual/triennial/widgets/river_manual_triennial_step_3_data_capture.dart @@ -211,12 +211,20 @@ class _RiverManualTriennialStep3DataCaptureState extends State? _getActiveConnectionDetails() { - final service = context.watch(); + // --- START FIX: Use read() instead of watch() --- + final service = context.read(); + // --- END FIX --- + if (service.bluetoothConnectionState.value != BluetoothConnectionState.disconnected) { return {'type': 'bluetooth', 'state': service.bluetoothConnectionState.value, 'name': service.connectedBluetoothDeviceName}; } @@ -582,6 +623,15 @@ class _RiverManualTriennialStep3DataCaptureState extends State( valueListenable: service.sondeId, builder: (context, sondeId, child) { - final newSondeId = sondeId ?? 
''; - WidgetsBinding.instance.addPostFrameCallback((_) { - if (mounted && _sondeIdController.text != newSondeId) { - _sondeIdController.text = newSondeId; - widget.data.sondeId = newSondeId; - } - }); + // --- START FIX: Only update if non-null to prevent clearing on disconnect --- + if (sondeId != null && sondeId.isNotEmpty) { + final newSondeId = sondeId; + WidgetsBinding.instance.addPostFrameCallback((_) { + if (mounted && _sondeIdController.text != newSondeId) { + _sondeIdController.text = newSondeId; + widget.data.sondeId = newSondeId; + } + }); + } + // --- END FIX --- return TextFormField( controller: _sondeIdController, decoration: const InputDecoration(labelText: 'Sonde ID *', hintText: 'Connect device or enter manually'), @@ -661,13 +716,21 @@ class _RiverManualTriennialStep3DataCaptureState extends State v == null || v.isEmpty ? 'Distance is required' : null, ), const SizedBox(height: 16), TextFormField( controller: _sdTimeFirstController, - decoration: const InputDecoration(labelText: 'Time First Deploy (HH:mm:ss)', suffixIcon: Icon(Icons.timer)), + decoration: const InputDecoration(labelText: 'Time First Deploy (HH:mm:ss) *', suffixIcon: Icon(Icons.timer)), readOnly: true, onTap: () => _selectTime(context, _sdTimeFirstController), + validator: (v) => v == null || v.isEmpty ? 'Start time is required' : null, ), const SizedBox(height: 16), TextFormField( controller: _sdTimeLastController, - decoration: const InputDecoration(labelText: 'Time Last Deploy (HH:mm:ss)', suffixIcon: Icon(Icons.timer)), + decoration: const InputDecoration(labelText: 'Time Last Deploy (HH:mm:ss) *', suffixIcon: Icon(Icons.timer)), readOnly: true, onTap: () => _selectTime(context, _sdTimeLastController), + validator: (v) => v == null || v.isEmpty ? 
'End time is required' : null, ), const SizedBox(height: 16), ElevatedButton( onPressed: _calculateFlowrate, - child: const Text('Get Flowrate'), + child: const Text('Calculate Flowrate'), ), const SizedBox(height: 16), TextFormField( controller: _flowrateValueController, - decoration: const InputDecoration(labelText: 'Flowrate (m/s)'), + decoration: const InputDecoration(labelText: 'Calculated Flowrate (m/s)'), readOnly: true, + // Add validator if calculation must be done? ), ], ), @@ -1023,20 +1120,28 @@ class _RiverManualTriennialStep3DataCaptureState extends State v == null || v.isEmpty ? 'Flowrate value is required' : null, ), ); } Widget _buildNAField() { + // Fix: Use controller to set value instead of initialValue to avoid conflict crash + if (_flowrateValueController.text != 'NA') { + _flowrateValueController.text = 'NA'; + } + return Padding( padding: const EdgeInsets.only(top: 16.0), child: TextFormField( controller: _flowrateValueController, decoration: const InputDecoration(labelText: 'Flowrate (m/s)'), - readOnly: true, + // initialValue: 'NA', // Removed to fix AssertionError: initialValue == null || controller == null + readOnly: true, // Make it read-only ), ); } -} \ No newline at end of file + +} // End of State class \ No newline at end of file diff --git a/lib/screens/river/manual/widgets/river_in_situ_step_3_data_capture.dart b/lib/screens/river/manual/widgets/river_in_situ_step_3_data_capture.dart index 045ffb5..faead53 100644 --- a/lib/screens/river/manual/widgets/river_in_situ_step_3_data_capture.dart +++ b/lib/screens/river/manual/widgets/river_in_situ_step_3_data_capture.dart @@ -7,15 +7,15 @@ import 'package:flutter_bluetooth_serial/flutter_bluetooth_serial.dart'; import 'package:usb_serial/usb_serial.dart'; import 'package:intl/intl.dart'; -import '../../../../auth_provider.dart'; -import '../../../../models/river_in_situ_sampling_data.dart'; -//import '../../../../services/api_service.dart'; // Import to access DatabaseHelper +import 
'../../../../../auth_provider.dart'; +import '../../../../../models/river_in_situ_sampling_data.dart'; +//import '../../../../../services/api_service.dart'; // Import to access DatabaseHelper import 'package:environment_monitoring_app/services/database_helper.dart'; -import '../../../../services/river_in_situ_sampling_service.dart'; -import '../../../../bluetooth/bluetooth_manager.dart'; -import '../../../../serial/serial_manager.dart'; -import '../../../../bluetooth/widgets/bluetooth_device_list_dialog.dart'; -import '../../../../serial/widget/serial_port_list_dialog.dart'; +import '../../../../../services/river_in_situ_sampling_service.dart'; +import '../../../../../bluetooth/bluetooth_manager.dart'; +import '../../../../../serial/serial_manager.dart'; +import '../../../../../bluetooth/widgets/bluetooth_device_list_dialog.dart'; +import '../../../../../serial/widget/serial_port_list_dialog.dart'; class RiverInSituStep3DataCapture extends StatefulWidget { final RiverInSituSamplingData data; @@ -211,12 +211,20 @@ class _RiverInSituStep3DataCaptureState extends State _handleConnectionAttempt(String type) async { - final service = context.read(); - final bool hasPermissions = await service.requestDevicePermissions(); + // Uses the correct _samplingService instance + final bool hasPermissions = await _samplingService.requestDevicePermissions(); if (!hasPermissions && mounted) { _showSnackBar("Bluetooth & Location permissions are required to connect.", isError: true); return; } _disconnectFromAll(); - await Future.delayed(const Duration(milliseconds: 250)); + await Future.delayed(const Duration(milliseconds: 250)); // Short delay after disconnect final bool connectionSuccess = await _connectToDevice(type); + if (connectionSuccess && mounted) { - _dataSubscription?.cancel(); - final stream = type == 'bluetooth' ? 
service.bluetoothDataStream : service.serialDataStream; + _dataSubscription?.cancel(); // Cancel previous subscription if any + final stream = type == 'bluetooth' ? _samplingService.bluetoothDataStream : _samplingService.serialDataStream; _dataSubscription = stream.listen((readings) { if (mounted) { _updateTextFields(readings); } + }, onError: (error) { + debugPrint("Error on data stream: $error"); + if (mounted) _showSnackBar("Data stream error: $error", isError: true); + _disconnect(type); // Disconnect on stream error + }, onDone: () { + debugPrint("Data stream done."); + if (mounted) _disconnect(type); // Disconnect when stream closes }); } } Future _connectToDevice(String type) async { + // Uses the correct _samplingService instance setState(() => _isLoading = true); - final service = context.read(); bool success = false; try { if (type == 'bluetooth') { - final devices = await service.getPairedBluetoothDevices(); - if (devices.isEmpty && mounted) { + final devices = await _samplingService.getPairedBluetoothDevices(); + if (!mounted) return false; // Check mounted after async gap + if (devices.isEmpty) { _showSnackBar('No paired Bluetooth devices found.', isError: true); return false; } final selectedDevice = await showBluetoothDeviceListDialog(context: context, devices: devices); if (selectedDevice != null) { - await service.connectToBluetoothDevice(selectedDevice); + await _samplingService.connectToBluetoothDevice(selectedDevice); success = true; } } else if (type == 'serial') { - final devices = await service.getAvailableSerialDevices(); - if (devices.isEmpty && mounted) { - _showSnackBar('No USB Serial devices found.', isError: true); + final devices = await _samplingService.getAvailableSerialDevices(); + if (!mounted) return false; + if (devices.isEmpty) { + _showSnackBar('No USB Serial devices found. 
Ensure device is plugged in.', isError: true); return false; } final selectedDevice = await showSerialPortListDialog(context: context, devices: devices); if (selectedDevice != null) { - await service.connectToSerialDevice(selectedDevice); + await _samplingService.connectToSerialDevice(selectedDevice); success = true; } } @@ -357,6 +386,7 @@ class _RiverInSituStep3DataCaptureState extends State? _getActiveConnectionDetails() { - final service = context.watch(); + // --- START FIX: Use read() instead of watch() --- + final service = context.read(); + // --- END FIX --- + if (service.bluetoothConnectionState.value != BluetoothConnectionState.disconnected) { return {'type': 'bluetooth', 'state': service.bluetoothConnectionState.value, 'name': service.connectedBluetoothDeviceName}; } @@ -580,13 +627,22 @@ class _RiverInSituStep3DataCaptureState extends State( valueListenable: service.sondeId, builder: (context, sondeId, child) { - final newSondeId = sondeId ?? ''; - WidgetsBinding.instance.addPostFrameCallback((_) { - if (mounted && _sondeIdController.text != newSondeId) { - _sondeIdController.text = newSondeId; - widget.data.sondeId = newSondeId; - } - }); + // --- START FIX: Only update if non-null to prevent clearing on disconnect --- + if (sondeId != null && sondeId.isNotEmpty) { + final newSondeId = sondeId; + WidgetsBinding.instance.addPostFrameCallback((_) { + if (mounted && _sondeIdController.text != newSondeId) { + _sondeIdController.text = newSondeId; + widget.data.sondeId = newSondeId; + } + }); + } + // --- END FIX --- return TextFormField( controller: _sondeIdController, decoration: const InputDecoration(labelText: 'Sonde ID *', hintText: 'Connect device or enter manually'), @@ -659,25 +719,32 @@ class _RiverInSituStep3DataCaptureState extends State v == null || v.isEmpty ? 
'Distance is required' : null, ), const SizedBox(height: 16), TextFormField( controller: _sdTimeFirstController, - decoration: const InputDecoration(labelText: 'Time First Deploy (HH:mm:ss)', suffixIcon: Icon(Icons.timer)), + decoration: const InputDecoration(labelText: 'Time First Deploy (HH:mm:ss) *', suffixIcon: Icon(Icons.timer)), readOnly: true, onTap: () => _selectTime(context, _sdTimeFirstController), + validator: (v) => v == null || v.isEmpty ? 'Start time is required' : null, ), const SizedBox(height: 16), TextFormField( controller: _sdTimeLastController, - decoration: const InputDecoration(labelText: 'Time Last Deploy (HH:mm:ss)', suffixIcon: Icon(Icons.timer)), + decoration: const InputDecoration(labelText: 'Time Last Deploy (HH:mm:ss) *', suffixIcon: Icon(Icons.timer)), readOnly: true, onTap: () => _selectTime(context, _sdTimeLastController), + validator: (v) => v == null || v.isEmpty ? 'End time is required' : null, ), const SizedBox(height: 16), ElevatedButton( onPressed: _calculateFlowrate, - child: const Text('Get Flowrate'), + child: const Text('Calculate Flowrate'), ), const SizedBox(height: 16), TextFormField( controller: _flowrateValueController, - decoration: const InputDecoration(labelText: 'Flowrate (m/s)'), + decoration: const InputDecoration(labelText: 'Calculated Flowrate (m/s)'), readOnly: true, + // Add validator if calculation must be done? ), ], ), @@ -1019,19 +1120,26 @@ class _RiverInSituStep3DataCaptureState extends State v == null || v.isEmpty ? 
'Flowrate value is required' : null, ), ); } Widget _buildNAField() { + // Fix: Use controller to set value instead of initialValue to avoid conflict crash + if (_flowrateValueController.text != 'NA') { + _flowrateValueController.text = 'NA'; + } + return Padding( padding: const EdgeInsets.only(top: 16.0), child: TextFormField( controller: _flowrateValueController, decoration: const InputDecoration(labelText: 'Flowrate (m/s)'), - readOnly: true, + // initialValue: 'NA', // Removed to fix AssertionError: initialValue == null || controller == null + readOnly: true, // Make it read-only ), ); } diff --git a/lib/services/marine_in_situ_sampling_service.dart b/lib/services/marine_in_situ_sampling_service.dart index 14b0635..e057650 100644 --- a/lib/services/marine_in_situ_sampling_service.dart +++ b/lib/services/marine_in_situ_sampling_service.dart @@ -32,7 +32,7 @@ import 'submission_ftp_service.dart'; import 'telegram_service.dart'; import 'retry_service.dart'; import 'base_api_service.dart'; // Import for SessionExpiredException - +import 'user_preferences_service.dart'; // ADDED /// A dedicated service to handle all business logic for the Marine In-Situ Sampling feature. /// This includes location, image processing, device communication, and data submission. @@ -51,6 +51,7 @@ class MarineInSituSamplingService { final DatabaseHelper _dbHelper = DatabaseHelper(); final RetryService _retryService = RetryService(); final TelegramService _telegramService; + final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED MarineInSituSamplingService(this._telegramService); @@ -262,132 +263,159 @@ class MarineInSituSamplingService { // data.reportId already contains the timestamp ID // --- END: MODIFIED TO USE TIMESTAMP ID --- - try { - // 1. 
Submit Form Data - apiDataResult = await _submissionApiService.submitPost( - moduleName: moduleName, - endpoint: 'marine/manual/sample', // Correct endpoint for In-Situ data - body: data.toApiFormData(), - ); + // 1. Check module preferences for API + final pref = await _userPreferencesService.getModulePreference(moduleName); + bool isApiEnabled = pref?['is_api_enabled'] ?? true; + bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true; - if (apiDataResult['success'] == true) { - anyApiSuccess = true; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // Store the server's database ID in a separate variable. - apiRecordId = apiDataResult['data']?['man_id']?.toString(); // Correct ID key for In-Situ - // --- END: MODIFIED TO USE TIMESTAMP ID --- + if (isApiEnabled) { + try { + // 1. Submit Form Data + apiDataResult = await _submissionApiService.submitPost( + moduleName: moduleName, + endpoint: 'marine/manual/sample', // Correct endpoint for In-Situ data + body: data.toApiFormData(), + ); - if (apiRecordId != null) { - if (finalImageFiles.isNotEmpty) { - // 2. Submit Images - apiImageResult = await _submissionApiService.submitMultipart( - moduleName: moduleName, - endpoint: 'marine/manual/images', // Correct endpoint for In-Situ images - // --- START: MODIFIED TO USE TIMESTAMP ID --- - fields: {'man_id': apiRecordId}, // Correct field key for In-Situ - // --- END: MODIFIED TO USE TIMESTAMP ID --- - files: finalImageFiles, - ); - if (apiImageResult['success'] != true) { - anyApiSuccess = false; - } - } - } else { - anyApiSuccess = false; + if (apiDataResult['success'] == true) { + anyApiSuccess = true; // --- START: MODIFIED TO USE TIMESTAMP ID --- - apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // Store the server's database ID in a separate variable. 
+ apiRecordId = apiDataResult['data']?['man_id']?.toString(); // Correct ID key for In-Situ // --- END: MODIFIED TO USE TIMESTAMP ID --- - } - } - // If apiDataResult['success'] is false, SubmissionApiService queued it. - } on SessionExpiredException catch (_) { - debugPrint("Online submission failed due to session expiry that could not be refreshed."); - isSessionKnownToBeExpired = true; - anyApiSuccess = false; - apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'}; - // Manually queue API calls - await _retryService.addApiToQueue(endpoint: 'marine/manual/sample', method: 'POST', body: data.toApiFormData()); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - if (finalImageFiles.isNotEmpty && apiRecordId != null) { - // Also queue images if data call might have partially succeeded before expiry - await _retryService.addApiToQueue(endpoint: 'marine/manual/images', method: 'POST_MULTIPART', fields: {'man_id': apiRecordId}, files: finalImageFiles); + if (apiRecordId != null) { + if (finalImageFiles.isNotEmpty) { + // 2. Submit Images + apiImageResult = await _submissionApiService.submitMultipart( + moduleName: moduleName, + endpoint: 'marine/manual/images', // Correct endpoint for In-Situ images + // --- START: MODIFIED TO USE TIMESTAMP ID --- + fields: {'man_id': apiRecordId}, // Correct field key for In-Situ + // --- END: MODIFIED TO USE TIMESTAMP ID --- + files: finalImageFiles, + ); + if (apiImageResult['success'] != true) { + anyApiSuccess = false; + } + } + } else { + anyApiSuccess = false; + // --- START: MODIFIED TO USE TIMESTAMP ID --- + apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // --- END: MODIFIED TO USE TIMESTAMP ID --- + } + } + // If apiDataResult['success'] is false, SubmissionApiService queued it. 
+ + } on SessionExpiredException catch (_) { + debugPrint("Online submission failed due to session expiry that could not be refreshed."); + isSessionKnownToBeExpired = true; + anyApiSuccess = false; + apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'}; + // Manually queue API calls + await _retryService.addApiToQueue(endpoint: 'marine/manual/sample', method: 'POST', body: data.toApiFormData()); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + if (finalImageFiles.isNotEmpty && apiRecordId != null) { + // Also queue images if data call might have partially succeeded before expiry + await _retryService.addApiToQueue(endpoint: 'marine/manual/images', method: 'POST_MULTIPART', fields: {'man_id': apiRecordId}, files: finalImageFiles); + } + // --- END: MODIFIED TO USE TIMESTAMP ID --- } - // --- END: MODIFIED TO USE TIMESTAMP ID --- + } else { + debugPrint("API submission disabled for $moduleName by user preference."); + apiDataResult = {'success': true, 'message': 'API submission disabled by user.'}; + anyApiSuccess = true; // Treated as success since it was intentional } // 3. Submit FTP Files Map ftpResults = {'statuses': []}; bool anyFtpSuccess = false; - if (isSessionKnownToBeExpired) { - debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks."); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - final baseFileNameForQueue = _generateBaseFileName(data); // Use helper - // --- END: MODIFIED TO USE TIMESTAMP ID --- + // --- START FIX: Check if FTP is enabled AND if it was already successful --- + bool previousFtpSuccess = data.submissionStatus == 'L4' || data.submissionStatus == 'S4'; - // --- START FIX: Add ftpConfigId when queuing --- - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; + if (!isFtpEnabled) { + debugPrint("FTP submission disabled for $moduleName by user preference. 
Skipping FTP."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'FTP disabled by user preference.', 'success': true}]}; + anyFtpSuccess = true; + } else if (previousFtpSuccess) { + debugPrint("FTP submission skipped because it was already successful (Status: ${data.submissionStatus})."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'Already successful in previous attempt.', 'success': true}]}; + anyFtpSuccess = true; + } else { + // Proceed with FTP logic only if enabled AND not previously successful + if (isSessionKnownToBeExpired) { + debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks."); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + final baseFileNameForQueue = _generateBaseFileName(data); // Use helper + // --- END: MODIFIED TO USE TIMESTAMP ID --- - // --- MODIFIED: Use new data model methods for multi-json zip --- - final dataZip = await _zippingService.createDataZip( - jsonDataMap: { - 'db.json': data.toDbJson(), - 'marine_insitu_basic_form.json': data.toBasicFormJson(), - 'marine_sampling_reading.json': data.toReadingJson(), - 'marine_manual_info.json': data.toManualInfoJson(), - }, - baseFileName: baseFileNameForQueue, - destinationDir: null, // Use temp dir - ); - // --- END MODIFIED --- + // --- START FIX: Add ftpConfigId when queuing --- + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? 
[]; - if (dataZip != null) { - // Queue for each config separately - for (final config in ftpConfigs) { - final configId = config['ftp_config_id']; - if (configId != null) { - await _retryService.addFtpToQueue( - localFilePath: dataZip.path, - remotePath: '/${p.basename(dataZip.path)}', - ftpConfigId: configId // Provide the specific config ID - ); - } - } - } - - if (finalImageFiles.isNotEmpty) { - final imageZip = await _zippingService.createImageZip( - imageFiles: finalImageFiles.values.toList(), + // --- MODIFIED: Use new data model methods for multi-json zip --- + final dataZip = await _zippingService.createDataZip( + jsonDataMap: { + 'db.json': data.toDbJson(), + 'marine_insitu_basic_form.json': data.toBasicFormJson(), + 'marine_sampling_reading.json': data.toReadingJson(), + 'marine_manual_info.json': data.toManualInfoJson(), + }, baseFileName: baseFileNameForQueue, destinationDir: null, // Use temp dir ); - if (imageZip != null) { + // --- END MODIFIED --- + + if (dataZip != null) { // Queue for each config separately for (final config in ftpConfigs) { final configId = config['ftp_config_id']; if (configId != null) { await _retryService.addFtpToQueue( - localFilePath: imageZip.path, - remotePath: '/${p.basename(imageZip.path)}', + localFilePath: dataZip.path, + remotePath: '/${p.basename(dataZip.path)}', ftpConfigId: configId // Provide the specific config ID ); } } } - } - // --- END FIX --- - ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; - anyFtpSuccess = false; - } else { - try { - ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); - anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); - } catch (e) { - debugPrint("Unexpected FTP submission error: $e"); + + if (finalImageFiles.isNotEmpty) { + final imageZip = await _zippingService.createImageZip( + 
imageFiles: finalImageFiles.values.toList(), + baseFileName: baseFileNameForQueue, + destinationDir: null, // Use temp dir + ); + if (imageZip != null) { + // Queue for each config separately + for (final config in ftpConfigs) { + final configId = config['ftp_config_id']; + if (configId != null) { + await _retryService.addFtpToQueue( + localFilePath: imageZip.path, + remotePath: '/${p.basename(imageZip.path)}', + ftpConfigId: configId // Provide the specific config ID + ); + } + } + } + } + // --- END FIX --- + ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; anyFtpSuccess = false; + } else { + try { + ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); + anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); + } catch (e) { + debugPrint("Unexpected FTP submission error: $e"); + anyFtpSuccess = false; + ftpResults = {'statuses': [{'status': 'Error', 'message': 'FTP process failed: $e.', 'success': false}]}; + } } } + // --- END FIX --- // 4. Determine Final Status final bool overallSuccess = anyApiSuccess || anyFtpSuccess; diff --git a/lib/services/marine_investigative_sampling_service.dart b/lib/services/marine_investigative_sampling_service.dart index 7f06b29..3803bb8 100644 --- a/lib/services/marine_investigative_sampling_service.dart +++ b/lib/services/marine_investigative_sampling_service.dart @@ -31,6 +31,7 @@ import 'retry_service.dart'; import 'base_api_service.dart'; // Import for SessionExpiredException import 'api_service.dart'; // Import for DatabaseHelper import 'package:environment_monitoring_app/services/database_helper.dart'; +import 'user_preferences_service.dart'; // ADDED /// A dedicated service for the Marine Investigative Sampling feature. 
@@ -49,6 +50,7 @@ class MarineInvestigativeSamplingService { final DatabaseHelper _dbHelper = DatabaseHelper(); final RetryService _retryService = RetryService(); final TelegramService _telegramService; + final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED MarineInvestigativeSamplingService(this._telegramService); @@ -268,121 +270,147 @@ class MarineInvestigativeSamplingService { // data.reportId already contains the timestamp ID // --- END: MODIFIED TO USE TIMESTAMP ID --- - try { - // 1. Submit Form Data - apiDataResult = await _submissionApiService.submitPost( - moduleName: moduleName, - endpoint: 'marine-investigative/sample', - body: data.toApiFormData(), - ); + // 1. Check module preferences for API + final pref = await _userPreferencesService.getModulePreference(moduleName); + bool isApiEnabled = pref?['is_api_enabled'] ?? true; + bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true; - if (apiDataResult['success'] == true) { - anyApiSuccess = true; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // Store the server's database ID in a separate variable. - apiRecordId = apiDataResult['data']?['man_inves_id']?.toString(); - // --- END: MODIFIED TO USE TIMESTAMP ID --- + if (isApiEnabled) { + try { + // 1. Submit Form Data + apiDataResult = await _submissionApiService.submitPost( + moduleName: moduleName, + endpoint: 'marine-investigative/sample', + body: data.toApiFormData(), + ); - if (apiRecordId != null) { - if (finalImageFiles.isNotEmpty) { - // 2. 
Submit Images - apiImageResult = await _submissionApiService.submitMultipart( - moduleName: moduleName, - endpoint: 'marine-investigative/images', - // --- START: MODIFIED TO USE TIMESTAMP ID --- - fields: {'man_inves_id': apiRecordId}, // Use server's ID - // --- END: MODIFIED TO USE TIMESTAMP ID --- - files: finalImageFiles, - ); - if (apiImageResult['success'] != true) { - anyApiSuccess = false; // Mark as failed if images fail - } - } - } else { - anyApiSuccess = false; + if (apiDataResult['success'] == true) { + anyApiSuccess = true; // --- START: MODIFIED TO USE TIMESTAMP ID --- - apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // Store the server's database ID in a separate variable. + apiRecordId = apiDataResult['data']?['man_inves_id']?.toString(); // --- END: MODIFIED TO USE TIMESTAMP ID --- + + if (apiRecordId != null) { + if (finalImageFiles.isNotEmpty) { + // 2. Submit Images + apiImageResult = await _submissionApiService.submitMultipart( + moduleName: moduleName, + endpoint: 'marine-investigative/images', + // --- START: MODIFIED TO USE TIMESTAMP ID --- + fields: {'man_inves_id': apiRecordId}, // Use server's ID + // --- END: MODIFIED TO USE TIMESTAMP ID --- + files: finalImageFiles, + ); + if (apiImageResult['success'] != true) { + anyApiSuccess = false; // Mark as failed if images fail + } + } + } else { + anyApiSuccess = false; + // --- START: MODIFIED TO USE TIMESTAMP ID --- + apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // --- END: MODIFIED TO USE TIMESTAMP ID --- + } } + } on SessionExpiredException catch (_) { + debugPrint("Online submission failed due to session expiry that could not be refreshed."); + isSessionKnownToBeExpired = true; // Mark session as expired + anyApiSuccess = false; + apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. 
API submission queued.'}; + // Manually queue the API call since SubmissionApiService was never called or failed internally due to session + await _retryService.addApiToQueue(endpoint: 'marine-investigative/sample', method: 'POST', body: data.toApiFormData()); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + if (finalImageFiles.isNotEmpty && apiRecordId != null) { + // Also queue images if data call might have partially succeeded before expiry + await _retryService.addApiToQueue(endpoint: 'marine-investigative/images', method: 'POST_MULTIPART', fields: {'man_inves_id': apiRecordId}, files: finalImageFiles); + } + // --- END: MODIFIED TO USE TIMESTAMP ID --- } - } on SessionExpiredException catch (_) { - debugPrint("Online submission failed due to session expiry that could not be refreshed."); - isSessionKnownToBeExpired = true; // Mark session as expired - anyApiSuccess = false; - apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'}; - // Manually queue the API call since SubmissionApiService was never called or failed internally due to session - await _retryService.addApiToQueue(endpoint: 'marine-investigative/sample', method: 'POST', body: data.toApiFormData()); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - if (finalImageFiles.isNotEmpty && apiRecordId != null) { - // Also queue images if data call might have partially succeeded before expiry - await _retryService.addApiToQueue(endpoint: 'marine-investigative/images', method: 'POST_MULTIPART', fields: {'man_inves_id': apiRecordId}, files: finalImageFiles); - } - // --- END: MODIFIED TO USE TIMESTAMP ID --- + } else { + debugPrint("API submission disabled for $moduleName by user preference."); + apiDataResult = {'success': true, 'message': 'API submission disabled by user.'}; + anyApiSuccess = true; // Treated as success since it was intentional } - // We no longer catch SocketException or TimeoutException here. // 3. 
Submit FTP Files Map ftpResults = {'statuses': []}; bool anyFtpSuccess = false; - if (isSessionKnownToBeExpired) { - debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks."); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - final baseFileNameForQueue = _generateBaseFileName(data); - // --- END: MODIFIED TO USE TIMESTAMP ID --- + // --- START FIX: Check if FTP is enabled AND if it was already successful --- + bool previousFtpSuccess = data.submissionStatus == 'L4' || data.submissionStatus == 'S4'; - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; + if (!isFtpEnabled) { + debugPrint("FTP submission disabled for $moduleName by user preference. Skipping FTP."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'FTP disabled by user preference.', 'success': true}]}; + anyFtpSuccess = true; + } else if (previousFtpSuccess) { + debugPrint("FTP submission skipped because it was already successful (Status: ${data.submissionStatus})."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'Already successful in previous attempt.', 'success': true}]}; + anyFtpSuccess = true; + } else { + // Proceed with FTP logic only if enabled AND not previously successful + if (isSessionKnownToBeExpired) { + debugPrint("Skipping FTP attempt due to known expired session. 
Manually queuing FTP tasks."); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + final baseFileNameForQueue = _generateBaseFileName(data); + // --- END: MODIFIED TO USE TIMESTAMP ID --- - final dataZip = await _zippingService.createDataZip( - jsonDataMap: {'db.json': jsonEncode(data.toDbJson())}, - baseFileName: baseFileNameForQueue, - destinationDir: null, // Use temp dir - ); - if (dataZip != null) { - for (final config in ftpConfigs) { - final configId = config['ftp_config_id']; - if (configId != null) { - await _retryService.addFtpToQueue( - localFilePath: dataZip.path, - remotePath: '/${p.basename(dataZip.path)}', - ftpConfigId: configId - ); - } - } - } + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; - if (finalImageFiles.isNotEmpty) { - final imageZip = await _zippingService.createImageZip( - imageFiles: finalImageFiles.values.toList(), + final dataZip = await _zippingService.createDataZip( + jsonDataMap: {'db.json': jsonEncode(data.toDbJson())}, baseFileName: baseFileNameForQueue, destinationDir: null, // Use temp dir ); - if (imageZip != null) { + if (dataZip != null) { for (final config in ftpConfigs) { final configId = config['ftp_config_id']; if (configId != null) { await _retryService.addFtpToQueue( - localFilePath: imageZip.path, - remotePath: '/${p.basename(imageZip.path)}', + localFilePath: dataZip.path, + remotePath: '/${p.basename(dataZip.path)}', ftpConfigId: configId ); } } } - } - ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; - anyFtpSuccess = false; - } else { - // Session is OK, proceed with normal FTP attempt - try { - ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); - anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); - } catch (e) { - debugPrint("Unexpected FTP submission error: $e"); + if (finalImageFiles.isNotEmpty) { 
+ final imageZip = await _zippingService.createImageZip( + imageFiles: finalImageFiles.values.toList(), + baseFileName: baseFileNameForQueue, + destinationDir: null, // Use temp dir + ); + if (imageZip != null) { + for (final config in ftpConfigs) { + final configId = config['ftp_config_id']; + if (configId != null) { + await _retryService.addFtpToQueue( + localFilePath: imageZip.path, + remotePath: '/${p.basename(imageZip.path)}', + ftpConfigId: configId + ); + } + } + } + } + ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; anyFtpSuccess = false; + + } else { + // Session is OK, proceed with normal FTP attempt + try { + ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); + anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); + } catch (e) { + debugPrint("Unexpected FTP submission error: $e"); + anyFtpSuccess = false; + ftpResults = {'statuses': [{'status': 'Error', 'message': 'FTP process failed: $e.', 'success': false}]}; // Provide error status + } } } + // --- END FIX --- // 4. Determine Final Status final bool overallSuccess = anyApiSuccess || anyFtpSuccess; @@ -609,12 +637,13 @@ class MarineInvestigativeSamplingService { final logData = { // --- START: MODIFIED TO USE TIMESTAMP ID --- - 'submission_id': data.reportId ?? baseFileName, // This is the timestamp ID - 'module': 'marine', - 'type': 'Investigative', + 'submission_id': data.reportId ?? 
baseFileName, // Use timestamp ID + // *** MODIFIED: Module and Type *** + 'module': 'marine', // Keep main module as 'marine' + 'type': 'Investigative', // Specific type 'status': status, 'message': message, - 'report_id': apiRecordId, // This is the server DB ID + 'report_id': apiRecordId, // Use server DB ID // --- END: MODIFIED TO USE TIMESTAMP ID --- 'created_at': DateTime.now().toIso8601String(), 'form_data': jsonEncode(logMapData), // Log comprehensive map @@ -873,5 +902,5 @@ class MarineInvestigativeSamplingService { return buffer.toString(); } -// --- END: MODIFIED ALERT HANDLER & HELPERS --- +// --- END: NEW METHOD --- } \ No newline at end of file diff --git a/lib/services/marine_tarball_sampling_service.dart b/lib/services/marine_tarball_sampling_service.dart index 4102aba..546e14a 100644 --- a/lib/services/marine_tarball_sampling_service.dart +++ b/lib/services/marine_tarball_sampling_service.dart @@ -23,6 +23,7 @@ import 'package:environment_monitoring_app/services/telegram_service.dart'; import 'package:environment_monitoring_app/services/retry_service.dart'; import 'package:environment_monitoring_app/auth_provider.dart'; import 'package:environment_monitoring_app/services/base_api_service.dart'; // Import for SessionExpiredException +import 'user_preferences_service.dart'; // ADDED /// A dedicated service to handle all business logic for the Marine Tarball Sampling feature. class MarineTarballSamplingService { @@ -34,6 +35,7 @@ class MarineTarballSamplingService { final DatabaseHelper _dbHelper = DatabaseHelper(); final RetryService _retryService = RetryService(); final TelegramService _telegramService; + final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED MarineTarballSamplingService(this._telegramService); @@ -130,6 +132,10 @@ class MarineTarballSamplingService { required AuthProvider? authProvider, // Accept potentially null provider String? 
logDirectory, // Added for retry consistency }) async { + // --- START FIX: Capture the status before attempting submission --- + final String? previousStatus = data.submissionStatus; + // --- END FIX --- + final serverName = (await _serverConfigService.getActiveApiConfig())?['config_name'] as String? ?? 'Default'; final imageFiles = data.toImageFiles()..removeWhere((key, value) => value == null); final finalImageFiles = imageFiles.cast(); @@ -146,130 +152,157 @@ class MarineTarballSamplingService { // data.reportId already contains the timestamp ID // --- END: MODIFIED TO USE TIMESTAMP ID --- - try { - // 1. Submit Form Data - apiDataResult = await _submissionApiService.submitPost( - moduleName: moduleName, - endpoint: 'marine/tarball/sample', // Correct endpoint - body: data.toFormData(), // Use specific method for tarball form data - ); + // 1. Check module preferences for API + final pref = await _userPreferencesService.getModulePreference(moduleName); + bool isApiEnabled = pref?['is_api_enabled'] ?? true; + bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true; - if (apiDataResult['success'] == true) { - anyApiSuccess = true; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // Store the server's database ID in a separate variable. - apiRecordId = apiDataResult['data']?['autoid']?.toString(); // Correct ID key - // --- END: MODIFIED TO USE TIMESTAMP ID --- + if (isApiEnabled) { + try { + // 1. Submit Form Data + apiDataResult = await _submissionApiService.submitPost( + moduleName: moduleName, + endpoint: 'marine/tarball/sample', // Correct endpoint + body: data.toFormData(), // Use specific method for tarball form data + ); - if (apiRecordId != null) { - if (finalImageFiles.isNotEmpty) { - // 2. 
Submit Images - apiImageResult = await _submissionApiService.submitMultipart( - moduleName: moduleName, - endpoint: 'marine/tarball/images', // Correct endpoint - // --- START: MODIFIED TO USE TIMESTAMP ID --- - fields: {'autoid': apiRecordId}, // Correct field key - // --- END: MODIFIED TO USE TIMESTAMP ID --- - files: finalImageFiles, - ); - if (apiImageResult['success'] != true) { - anyApiSuccess = false; // Downgrade success if images fail - } - } - // If data succeeded but no images, API part is still successful - } else { - anyApiSuccess = false; + if (apiDataResult['success'] == true) { + anyApiSuccess = true; // --- START: MODIFIED TO USE TIMESTAMP ID --- - apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // Store the server's database ID in a separate variable. + apiRecordId = apiDataResult['data']?['autoid']?.toString(); // Correct ID key // --- END: MODIFIED TO USE TIMESTAMP ID --- - } - } - // If apiDataResult['success'] is false, SubmissionApiService queued it. - } on SessionExpiredException catch (_) { - debugPrint("API submission failed with SessionExpiredException during online submission."); - isSessionKnownToBeExpired = true; - anyApiSuccess = false; - apiDataResult = {'success': false, 'message': 'Session expired. API submission queued.'}; - // Manually queue API calls - await _retryService.addApiToQueue(endpoint: 'marine/tarball/sample', method: 'POST', body: data.toFormData()); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - if (finalImageFiles.isNotEmpty && apiRecordId != null) { - // Queue images if data might have partially succeeded - await _retryService.addApiToQueue(endpoint: 'marine/tarball/images', method: 'POST_MULTIPART', fields: {'autoid': apiRecordId}, files: finalImageFiles); + if (apiRecordId != null) { + if (finalImageFiles.isNotEmpty) { + // 2. 
Submit Images + apiImageResult = await _submissionApiService.submitMultipart( + moduleName: moduleName, + endpoint: 'marine/tarball/images', // Correct endpoint + // --- START: MODIFIED TO USE TIMESTAMP ID --- + fields: {'autoid': apiRecordId}, // Correct field key + // --- END: MODIFIED TO USE TIMESTAMP ID --- + files: finalImageFiles, + ); + if (apiImageResult['success'] != true) { + anyApiSuccess = false; // Downgrade success if images fail + } + } + // If data succeeded but no images, API part is still successful + } else { + anyApiSuccess = false; + // --- START: MODIFIED TO USE TIMESTAMP ID --- + apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // --- END: MODIFIED TO USE TIMESTAMP ID --- + } + } + // If apiDataResult['success'] is false, SubmissionApiService queued it. + + } on SessionExpiredException catch (_) { + debugPrint("API submission failed with SessionExpiredException during online submission."); + isSessionKnownToBeExpired = true; + anyApiSuccess = false; + apiDataResult = {'success': false, 'message': 'Session expired. API submission queued.'}; + // Manually queue API calls + await _retryService.addApiToQueue(endpoint: 'marine/tarball/sample', method: 'POST', body: data.toFormData()); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + if (finalImageFiles.isNotEmpty && apiRecordId != null) { + // Queue images if data might have partially succeeded + await _retryService.addApiToQueue(endpoint: 'marine/tarball/images', method: 'POST_MULTIPART', fields: {'autoid': apiRecordId}, files: finalImageFiles); + } + // --- END: MODIFIED TO USE TIMESTAMP ID --- } - // --- END: MODIFIED TO USE TIMESTAMP ID --- + } else { + debugPrint("API submission disabled for $moduleName by user preference."); + apiDataResult = {'success': true, 'message': 'API submission disabled by user.'}; + anyApiSuccess = true; // Treated as success since it was intentional } // 3. 
Submit FTP Files Map ftpResults = {'statuses': []}; bool anyFtpSuccess = false; - if (isSessionKnownToBeExpired) { - debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks."); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - final baseFileNameForQueue = _generateBaseFileName(data); // Use helper - // --- END: MODIFIED TO USE TIMESTAMP ID --- + // --- START FIX: Check if FTP is enabled AND if it was already successful --- + bool previousFtpSuccess = data.submissionStatus == 'L4' || data.submissionStatus == 'S4'; - // --- START FIX: Add ftpConfigId when queuing --- - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; + if (!isFtpEnabled) { + debugPrint("FTP submission disabled for $moduleName by user preference. Skipping FTP."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'FTP disabled by user preference.', 'success': true}]}; + anyFtpSuccess = true; + } else if (previousFtpSuccess) { + debugPrint("FTP submission skipped because it was already successful (Status: ${data.submissionStatus})."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'Already successful in previous attempt.', 'success': true}]}; + anyFtpSuccess = true; + } else { + // Proceed with FTP logic only if enabled AND not previously successful + if (isSessionKnownToBeExpired) { + debugPrint("Skipping FTP attempt due to known expired session. 
Manually queuing FTP tasks."); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + final baseFileNameForQueue = _generateBaseFileName(data); // Use helper + // --- END: MODIFIED TO USE TIMESTAMP ID --- - final dataZip = await _zippingService.createDataZip( - jsonDataMap: { // Use specific JSON structures for Tarball FTP - 'data.json': jsonEncode(data.toDbJson()), - 'basic_form.json': jsonEncode(data.toBasicFormJson()), - 'reading.json': jsonEncode(data.toReadingJson()), - 'manual_info.json': jsonEncode(data.toManualInfoJson()), - }, - baseFileName: baseFileNameForQueue, - destinationDir: null, - ); - if (dataZip != null) { - // Queue for each config separately - for (final config in ftpConfigs) { - final configId = config['ftp_config_id']; - if (configId != null) { - await _retryService.addFtpToQueue( - localFilePath: dataZip.path, - remotePath: '/${p.basename(dataZip.path)}', - ftpConfigId: configId // Provide the specific config ID - ); - } - } - } + // --- START FIX: Add ftpConfigId when queuing --- + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? 
[]; - if (finalImageFiles.isNotEmpty) { - final imageZip = await _zippingService.createImageZip( - imageFiles: finalImageFiles.values.toList(), + final dataZip = await _zippingService.createDataZip( + jsonDataMap: { // Use specific JSON structures for Tarball FTP + 'data.json': jsonEncode(data.toDbJson()), + 'basic_form.json': jsonEncode(data.toBasicFormJson()), + 'reading.json': jsonEncode(data.toReadingJson()), + 'manual_info.json': jsonEncode(data.toManualInfoJson()), + }, baseFileName: baseFileNameForQueue, destinationDir: null, ); - if (imageZip != null) { + if (dataZip != null) { // Queue for each config separately for (final config in ftpConfigs) { final configId = config['ftp_config_id']; if (configId != null) { await _retryService.addFtpToQueue( - localFilePath: imageZip.path, - remotePath: '/${p.basename(imageZip.path)}', + localFilePath: dataZip.path, + remotePath: '/${p.basename(dataZip.path)}', ftpConfigId: configId // Provide the specific config ID ); } } } - } - // --- END FIX --- - ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; - anyFtpSuccess = false; - } else { - try { - ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); - anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); - } catch (e) { - debugPrint("Unexpected FTP submission error: $e"); + + if (finalImageFiles.isNotEmpty) { + final imageZip = await _zippingService.createImageZip( + imageFiles: finalImageFiles.values.toList(), + baseFileName: baseFileNameForQueue, + destinationDir: null, + ); + if (imageZip != null) { + // Queue for each config separately + for (final config in ftpConfigs) { + final configId = config['ftp_config_id']; + if (configId != null) { + await _retryService.addFtpToQueue( + localFilePath: imageZip.path, + remotePath: '/${p.basename(imageZip.path)}', + ftpConfigId: 
configId // Provide the specific config ID + ); + } + } + } + } + // --- END FIX --- + ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; anyFtpSuccess = false; + } else { + try { + ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); + anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); + } catch (e) { + debugPrint("Unexpected FTP submission error: $e"); + anyFtpSuccess = false; + ftpResults = {'statuses': [{'status': 'Error', 'message': 'FTP process failed: $e.', 'success': false}]}; // Add error status + } } } + // --- END FIX --- // 4. Determine Final Status @@ -305,9 +338,12 @@ class MarineTarballSamplingService { ); // 6. Send Alert - if (overallSuccess) { + // --- START FIX: Check if log was already successful before sending alert --- + final bool wasAlreadySuccessful = previousStatus == 'S4' || previousStatus == 'S3' || previousStatus == 'L4'; + if (overallSuccess && !wasAlreadySuccessful) { _handleTarballSuccessAlert(data, appSettings, isDataOnly: finalImageFiles.isEmpty, isSessionExpired: isSessionKnownToBeExpired); } + // --- END FIX --- return {'success': overallSuccess, 'message': finalMessage, 'reportId': data.reportId}; // Return timestamp ID } diff --git a/lib/services/retry_service.dart b/lib/services/retry_service.dart index 263d17e..1c2bb51 100644 --- a/lib/services/retry_service.dart +++ b/lib/services/retry_service.dart @@ -35,6 +35,7 @@ import 'package:environment_monitoring_app/services/base_api_service.dart'; import 'package:environment_monitoring_app/services/ftp_service.dart'; import 'package:environment_monitoring_app/services/server_config_service.dart'; import 'package:environment_monitoring_app/auth_provider.dart'; +import 'package:environment_monitoring_app/services/user_preferences_service.dart'; // ADDED /// A dedicated service to 
manage the queue of failed API, FTP, and complex submission tasks. class RetryService { @@ -42,6 +43,7 @@ class RetryService { final BaseApiService _baseApiService = BaseApiService(); final FtpService _ftpService = FtpService(); final ServerConfigService _serverConfigService = ServerConfigService(); + final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED bool _isProcessing = false; // Sampling Services @@ -601,10 +603,39 @@ class RetryService { await _dbHelper.deleteRequestFromQueue(taskId); return false; } + + // --- START FIX: Check if this FTP module is enabled in preferences --- + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; + final config = ftpConfigs.firstWhere( + (c) => c['ftp_config_id'] == ftpConfigId, + orElse: () => {}, + ); + + if (config.isNotEmpty) { + String? moduleKey = config['ftp_module']; + // Map legacy module names if needed (e.g., river_manual -> river_in_situ) + if (moduleKey == 'river_manual') { + moduleKey = 'river_in_situ'; + } else if (moduleKey == 'marine_manual') { + moduleKey = 'marine_in_situ'; + } + // Add other mappings if needed for consistency with user preferences keys + + if (moduleKey != null) { + final pref = await _userPreferencesService.getModulePreference(moduleKey); + final bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true; + + if (!isFtpEnabled) { + debugPrint("RetryService: FTP upload for module '$moduleKey' is disabled by user. Removing task $taskId."); + await _dbHelper.deleteRequestFromQueue(taskId); + return false; + } + } + } + // --- END FIX --- + if (await localFile.exists()) { - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? 
[]; - final config = ftpConfigs.firstWhere((c) => c['ftp_config_id'] == ftpConfigId, orElse: () => {}); - if (config.isEmpty) return false; + if (config.isEmpty) return false; // Config missing final result = await _ftpService.uploadFile(config: config, fileToUpload: localFile, remotePath: remotePath); success = result['success']; diff --git a/lib/services/river_in_situ_sampling_service.dart b/lib/services/river_in_situ_sampling_service.dart index 530d8bc..4e7c334 100644 --- a/lib/services/river_in_situ_sampling_service.dart +++ b/lib/services/river_in_situ_sampling_service.dart @@ -33,6 +33,7 @@ import 'submission_ftp_service.dart'; import 'telegram_service.dart'; import 'retry_service.dart'; import 'base_api_service.dart'; // Import for SessionExpiredException +import 'user_preferences_service.dart'; // ADDED class RiverInSituSamplingService { @@ -47,6 +48,7 @@ class RiverInSituSamplingService { final ZippingService _zippingService = ZippingService(); final RetryService _retryService = RetryService(); final TelegramService _telegramService; + final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED final ImagePicker _picker = ImagePicker(); static const platform = MethodChannel('com.example.environment_monitoring_app/usb'); @@ -260,149 +262,176 @@ class RiverInSituSamplingService { // data.reportId already contains the timestamp ID // --- END: MODIFIED TO USE TIMESTAMP ID --- - try { - // 1. Submit Form Data - apiDataResult = await _submissionApiService.submitPost( - moduleName: moduleName, - endpoint: 'river/manual/sample', // Correct endpoint - body: data.toApiFormData(), - ); + // 1. Check module preferences for API + final pref = await _userPreferencesService.getModulePreference(moduleName); + bool isApiEnabled = pref?['is_api_enabled'] ?? true; + bool isFtpEnabled = pref?['is_ftp_enabled'] ?? 
true; // --- MODIFIED: Check FTP pref early --- - if (apiDataResult['success'] == true) { - anyApiSuccess = true; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // Store the server's database ID in a separate variable. - // data.reportId (the timestamp) REMAINS UNCHANGED. - apiRecordId = apiDataResult['data']?['r_man_id']?.toString(); // Correct ID key - // --- END: MODIFIED TO USE TIMESTAMP ID --- + if (isApiEnabled) { + try { + // 1. Submit Form Data + apiDataResult = await _submissionApiService.submitPost( + moduleName: moduleName, + endpoint: 'river/manual/sample', // Correct endpoint + body: data.toApiFormData(), + ); - if (apiRecordId != null) { // Check if server returned an ID - if (finalImageFiles.isNotEmpty) { - // 2. Submit Images - apiImageResult = await _submissionApiService.submitMultipart( - moduleName: moduleName, - endpoint: 'river/manual/images', // Correct endpoint - // --- START: MODIFIED TO USE TIMESTAMP ID --- - fields: {'r_man_id': apiRecordId}, // Use server's ID for relation - // --- END: MODIFIED TO USE TIMESTAMP ID --- - files: finalImageFiles, - ); - if (apiImageResult['success'] != true) { - anyApiSuccess = false; - } - } - } else { - anyApiSuccess = false; + if (apiDataResult['success'] == true) { + anyApiSuccess = true; // --- START: MODIFIED TO USE TIMESTAMP ID --- - apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // Store the server's database ID in a separate variable. + // data.reportId (the timestamp) REMAINS UNCHANGED. + apiRecordId = apiDataResult['data']?['r_man_id']?.toString(); // Correct ID key + // --- END: MODIFIED TO USE TIMESTAMP ID --- + + if (apiRecordId != null) { // Check if server returned an ID + if (finalImageFiles.isNotEmpty) { + // 2. 
Submit Images + apiImageResult = await _submissionApiService.submitMultipart( + moduleName: moduleName, + endpoint: 'river/manual/images', // Correct endpoint + // --- START: MODIFIED TO USE TIMESTAMP ID --- + fields: {'r_man_id': apiRecordId}, // Use server's ID for relation + // --- END: MODIFIED TO USE TIMESTAMP ID --- + files: finalImageFiles, + ); + if (apiImageResult['success'] != true) { + anyApiSuccess = false; + } + } + } else { + anyApiSuccess = false; + // --- START: MODIFIED TO USE TIMESTAMP ID --- + apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // --- END: MODIFIED TO USE TIMESTAMP ID --- + } + } + // If apiDataResult['success'] is false, SubmissionApiService queued it. + + } on SessionExpiredException catch (_) { + debugPrint("Online submission failed due to session expiry that could not be refreshed."); + isSessionKnownToBeExpired = true; + anyApiSuccess = false; + apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'}; + // Manually queue API calls + await _retryService.addApiToQueue(endpoint: 'river/manual/sample', method: 'POST', body: data.toApiFormData()); + if (finalImageFiles.isNotEmpty && apiRecordId != null) { + // Also queue images if data call might have partially succeeded before expiry + // --- START: MODIFIED TO USE TIMESTAMP ID --- + await _retryService.addApiToQueue(endpoint: 'river/manual/images', method: 'POST_MULTIPART', fields: {'r_man_id': apiRecordId}, files: finalImageFiles); // --- END: MODIFIED TO USE TIMESTAMP ID --- } } - // If apiDataResult['success'] is false, SubmissionApiService queued it. - - } on SessionExpiredException catch (_) { - debugPrint("Online submission failed due to session expiry that could not be refreshed."); - isSessionKnownToBeExpired = true; - anyApiSuccess = false; - apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. 
API submission queued.'}; - // Manually queue API calls - await _retryService.addApiToQueue(endpoint: 'river/manual/sample', method: 'POST', body: data.toApiFormData()); - if (finalImageFiles.isNotEmpty && apiRecordId != null) { - // Also queue images if data call might have partially succeeded before expiry - // --- START: MODIFIED TO USE TIMESTAMP ID --- - await _retryService.addApiToQueue(endpoint: 'river/manual/images', method: 'POST_MULTIPART', fields: {'r_man_id': apiRecordId}, files: finalImageFiles); - // --- END: MODIFIED TO USE TIMESTAMP ID --- - } + } else { + debugPrint("API submission disabled for $moduleName by user preference."); + apiDataResult = {'success': true, 'message': 'API submission disabled by user.'}; + anyApiSuccess = true; // Treated as success since it was intentional } // 3. Submit FTP Files Map ftpResults = {'statuses': []}; bool anyFtpSuccess = false; - if (isSessionKnownToBeExpired) { - debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks."); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // We can now safely call _generateBaseFileName, as data.reportId is the timestamp - final baseFileNameForQueue = _generateBaseFileName(data); - // --- END: MODIFIED TO USE TIMESTAMP ID --- + // --- START FIX: Check if FTP is enabled AND if it was already successful --- + // 'L4' status means API Failed but FTP Succeeded. If re-submitting an L4 record, we skip FTP. + // 'S4' means everything succeeded. + bool previousFtpSuccess = data.submissionStatus == 'L4' || data.submissionStatus == 'S4'; - // --- START FIX: Add ftpConfigId when queuing --- - // Get all potential FTP configs - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; + if (!isFtpEnabled) { + debugPrint("FTP submission disabled for $moduleName by user preference. 
Skipping FTP."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'FTP disabled by user preference.', 'success': true}]}; + anyFtpSuccess = true; + } else if (previousFtpSuccess) { + debugPrint("FTP submission skipped because it was already successful (Status: ${data.submissionStatus})."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'Already successful in previous attempt.', 'success': true}]}; + anyFtpSuccess = true; + } else { + // Proceed with FTP logic only if enabled AND not previously successful + if (isSessionKnownToBeExpired) { + debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks."); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + // We can now safely call _generateBaseFileName, as data.reportId is the timestamp + final baseFileNameForQueue = _generateBaseFileName(data); + // --- END: MODIFIED TO USE TIMESTAMP ID --- - final dataZip = await _zippingService.createDataZip( - jsonDataMap: { // Use specific JSON structures for River In-Situ FTP - 'db.json': data.toDbJson(), - 'river_insitu_basic_form.json': data.toBasicFormJson(), - 'river_sampling_reading.json': data.toReadingJson(), - 'river_manual_info.json': data.toManualInfoJson(), - }, - baseFileName: baseFileNameForQueue, - destinationDir: null, - ); - if (dataZip != null) { - // Queue for each config separately - for (final config in ftpConfigs) { - final configId = config['ftp_config_id']; - if (configId != null) { - await _retryService.addFtpToQueue( - localFilePath: dataZip.path, - remotePath: '/${p.basename(dataZip.path)}', - ftpConfigId: configId // Provide the specific config ID - ); - } - } - } + // --- START FIX: Add ftpConfigId when queuing --- + // Get all potential FTP configs + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; - if (finalImageFiles.isNotEmpty) { - // Re-construct the map for retry to attempt renaming even in fallback - final Map retryImages = {}; - final String dateStr = (data.samplingDate ?? 
'').replaceAll('-', ''); - final String timeStr = (data.samplingTime ?? '').replaceAll(':', ''); - final String timestampId = "$dateStr$timeStr"; - - void addRetryMap(File? file, String prefix) { - if(file != null) retryImages['${prefix}_$timestampId.jpg'] = file; - } - addRetryMap(data.backgroundStationImage, 'background'); - addRetryMap(data.upstreamRiverImage, 'upstream'); - addRetryMap(data.downstreamRiverImage, 'downstream'); - addRetryMap(data.sampleTurbidityImage, 'sample_turbidity'); - addRetryMap(data.optionalImage1, 'optional_1'); - addRetryMap(data.optionalImage2, 'optional_2'); - addRetryMap(data.optionalImage3, 'optional_3'); - addRetryMap(data.optionalImage4, 'optional_4'); - - final retryImageZip = await _zippingService.createRenamedImageZip( - imageFiles: retryImages, + final dataZip = await _zippingService.createDataZip( + jsonDataMap: { // Use specific JSON structures for River In-Situ FTP + 'db.json': data.toDbJson(), + 'river_insitu_basic_form.json': data.toBasicFormJson(), + 'river_sampling_reading.json': data.toReadingJson(), + 'river_manual_info.json': data.toManualInfoJson(), + }, baseFileName: baseFileNameForQueue, destinationDir: null, ); - - if (retryImageZip != null) { + if (dataZip != null) { // Queue for each config separately for (final config in ftpConfigs) { final configId = config['ftp_config_id']; if (configId != null) { await _retryService.addFtpToQueue( - localFilePath: retryImageZip.path, - remotePath: '/${p.basename(retryImageZip.path)}', + localFilePath: dataZip.path, + remotePath: '/${p.basename(dataZip.path)}', ftpConfigId: configId // Provide the specific config ID ); } } } - } - // --- END FIX --- - ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; - anyFtpSuccess = false; - } else { - try { - ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); - anyFtpSuccess = !(ftpResults['statuses'] as 
List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); - } catch (e) { - debugPrint("Unexpected FTP submission error: $e"); + + if (finalImageFiles.isNotEmpty) { + // Re-construct the map for retry to attempt renaming even in fallback + final Map retryImages = {}; + final String dateStr = (data.samplingDate ?? '').replaceAll('-', ''); + final String timeStr = (data.samplingTime ?? '').replaceAll(':', ''); + final String timestampId = "$dateStr$timeStr"; + + void addRetryMap(File? file, String prefix) { + if(file != null) retryImages['${prefix}_$timestampId.jpg'] = file; + } + addRetryMap(data.backgroundStationImage, 'background'); + addRetryMap(data.upstreamRiverImage, 'upstream'); + addRetryMap(data.downstreamRiverImage, 'downstream'); + addRetryMap(data.sampleTurbidityImage, 'sample_turbidity'); + addRetryMap(data.optionalImage1, 'optional_1'); + addRetryMap(data.optionalImage2, 'optional_2'); + addRetryMap(data.optionalImage3, 'optional_3'); + addRetryMap(data.optionalImage4, 'optional_4'); + + final retryImageZip = await _zippingService.createRenamedImageZip( + imageFiles: retryImages, + baseFileName: baseFileNameForQueue, + destinationDir: null, + ); + + if (retryImageZip != null) { + // Queue for each config separately + for (final config in ftpConfigs) { + final configId = config['ftp_config_id']; + if (configId != null) { + await _retryService.addFtpToQueue( + localFilePath: retryImageZip.path, + remotePath: '/${p.basename(retryImageZip.path)}', + ftpConfigId: configId // Provide the specific config ID + ); + } + } + } + } + // --- END FIX --- + ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; anyFtpSuccess = false; + } else { + try { + ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); + anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 
'Not Configured'); + } catch (e) { + debugPrint("Unexpected FTP submission error: $e"); + anyFtpSuccess = false; + } } } @@ -574,7 +603,7 @@ class RiverInSituSamplingService { mapImage(data.backgroundStationImage, 'background'); mapImage(data.upstreamRiverImage, 'upstream'); mapImage(data.downstreamRiverImage, 'downstream'); - mapImage(data.sampleTurbidityImage, 'turbidity'); + mapImage(data.sampleTurbidityImage, 'sample_turbidity'); mapImage(data.optionalImage1, 'optional_1'); mapImage(data.optionalImage2, 'optional_2'); mapImage(data.optionalImage3, 'optional_3'); diff --git a/lib/services/river_investigative_sampling_service.dart b/lib/services/river_investigative_sampling_service.dart index 1fc3d6f..f81410d 100644 --- a/lib/services/river_investigative_sampling_service.dart +++ b/lib/services/river_investigative_sampling_service.dart @@ -15,14 +15,14 @@ import 'package:usb_serial/usb_serial.dart'; import 'dart:convert'; import 'package:intl/intl.dart'; import 'package:connectivity_plus/connectivity_plus.dart'; -import 'package:provider/provider.dart'; // Keep provider import if needed internally, though less common in services +import 'package:provider/provider.dart'; import '../auth_provider.dart'; import 'location_service.dart'; -import '../models/river_inves_manual_sampling_data.dart'; // Use Investigative model +import '../models/river_inves_manual_sampling_data.dart'; import '../bluetooth/bluetooth_manager.dart'; import '../serial/serial_manager.dart'; -import 'api_service.dart'; // Keep ApiService import for DatabaseHelper access within service if needed, or remove if unused directly +import 'api_service.dart'; import 'package:environment_monitoring_app/services/database_helper.dart'; import 'local_storage_service.dart'; import 'server_config_service.dart'; @@ -31,10 +31,10 @@ import 'submission_api_service.dart'; import 'submission_ftp_service.dart'; import 'telegram_service.dart'; import 'retry_service.dart'; -import 'base_api_service.dart'; // Import 
for SessionExpiredException +import 'base_api_service.dart'; +import 'user_preferences_service.dart'; // ADDED - -class RiverInvestigativeSamplingService { // Renamed class +class RiverInvestigativeSamplingService { final LocationService _locationService = LocationService(); final BluetoothManager _bluetoothManager = BluetoothManager(); final SerialManager _serialManager = SerialManager(); @@ -46,22 +46,22 @@ class RiverInvestigativeSamplingService { // Renamed class final ZippingService _zippingService = ZippingService(); final RetryService _retryService = RetryService(); final TelegramService _telegramService; + final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED final ImagePicker _picker = ImagePicker(); static const platform = MethodChannel('com.example.environment_monitoring_app/usb'); - RiverInvestigativeSamplingService(this._telegramService); // Constructor remains similar + RiverInvestigativeSamplingService(this._telegramService); Future getCurrentLocation() => _locationService.getCurrentLocation(); double calculateDistance(double lat1, double lon1, double lat2, double lon2) => _locationService.calculateDistance(lat1, lon1, lat2, lon2); - // Adapted image processing for Investigative data - Future pickAndProcessImage(ImageSource source, { required RiverInvesManualSamplingData data, required String imageInfo, bool isRequired = false, String? stationCode}) async { // Updated model type + Future pickAndProcessImage(ImageSource source, { required RiverInvesManualSamplingData data, required String imageInfo, bool isRequired = false, String? stationCode}) async { try { final XFile? 
pickedFile = await _picker.pickImage( source: source, - imageQuality: 85, // Keep quality settings - maxWidth: 1024, // Keep resolution settings + imageQuality: 85, + maxWidth: 1024, ); if (pickedFile == null) { @@ -74,30 +74,24 @@ class RiverInvestigativeSamplingService { // Renamed class return null; } - // ✅ FIX: Apply landscape check to ALL photos, not just required ones. if (originalImage.height > originalImage.width) { debugPrint("Image rejected: Must be in landscape orientation."); return null; } - // Watermark using investigative data final String watermarkTimestamp = "${data.samplingDate} ${data.samplingTime}"; - final font = img.arial24; // Use consistent font - final textWidth = watermarkTimestamp.length * 12; // Approximate width - // Draw background rectangle for text visibility + final font = img.arial24; + final textWidth = watermarkTimestamp.length * 12; + img.fillRect(originalImage, x1: 5, y1: 5, x2: textWidth + 15, y2: 35, color: img.ColorRgb8(255, 255, 255)); - // Draw timestamp string img.drawString(originalImage, watermarkTimestamp, font: font, x: 10, y: 10, color: img.ColorRgb8(0, 0, 0)); final tempDir = await getTemporaryDirectory(); - // Use the determined station code passed in (handles Manual/Triennial/New) final finalStationCode = stationCode ?? 
'NA'; final fileTimestamp = "${data.samplingDate}-${data.samplingTime}".replaceAll(':', '-'); - // Consistent filename format final newFileName = "${finalStationCode}_${fileTimestamp}_${imageInfo.replaceAll(' ', '')}.jpg"; final filePath = p.join(tempDir.path, newFileName); - // Encode and write the processed image return File(filePath)..writeAsBytesSync(img.encodeJpg(originalImage)); } catch (e) { @@ -106,7 +100,6 @@ class RiverInvestigativeSamplingService { // Renamed class } } - // Bluetooth and Serial Management - No changes needed, uses shared managers ValueNotifier get bluetoothConnectionState => _bluetoothManager.connectionState; ValueNotifier get serialConnectionState => _serialManager.connectionState; @@ -123,19 +116,17 @@ class RiverInvestigativeSamplingService { // Renamed class String? get connectedSerialDeviceName => _serialManager.connectedDeviceName.value; Future requestDevicePermissions() async { - // Permission logic remains the same Map statuses = await [ Permission.bluetoothScan, Permission.bluetoothConnect, - Permission.locationWhenInUse, // Keep location permission for GPS + Permission.locationWhenInUse, ].request(); if (statuses[Permission.bluetoothScan] == PermissionStatus.granted && statuses[Permission.bluetoothConnect] == PermissionStatus.granted && - statuses[Permission.locationWhenInUse] == PermissionStatus.granted) { // Ensure location is granted too + statuses[Permission.locationWhenInUse] == PermissionStatus.granted) { return true; } else { - debugPrint("Bluetooth Scan: ${statuses[Permission.bluetoothScan]}, Bluetooth Connect: ${statuses[Permission.bluetoothConnect]}, Location: ${statuses[Permission.locationWhenInUse]}"); return false; } } @@ -148,9 +139,7 @@ class RiverInvestigativeSamplingService { // Renamed class Future> getAvailableSerialDevices() => _serialManager.getAvailableDevices(); Future requestUsbPermission(UsbDevice device) async { - // USB permission logic remains the same try { - // Ensure the platform channel name 
matches what's defined in your native code (Android/iOS) return await platform.invokeMethod('requestUsbPermission', {'vid': device.vid, 'pid': device.pid}) ?? false; } on PlatformException catch (e) { debugPrint("Failed to request USB permission: '${e.message}'."); @@ -159,7 +148,6 @@ class RiverInvestigativeSamplingService { // Renamed class } Future connectToSerialDevice(UsbDevice device) async { - // Serial connection logic remains the same final bool permissionGranted = await requestUsbPermission(device); if (permissionGranted) { await _serialManager.connect(device); @@ -176,51 +164,40 @@ class RiverInvestigativeSamplingService { // Renamed class _serialManager.dispose(); } - // --- START: NEW HELPER METHOD --- - /// Generates a unique timestamp ID from the sampling date and time. String _generateTimestampId(String? date, String? time) { final String dateTimeString = "${date ?? ''} ${time ?? ''}"; try { - // Time format from model is HH:mm final DateTime samplingDateTime = DateFormat('yyyy-MM-dd HH:mm').parse(dateTimeString); return samplingDateTime.millisecondsSinceEpoch.toString(); } catch (e) { - // Fallback: if parsing fails, use the current time in milliseconds debugPrint("Could not parse '$dateTimeString' for timestamp ID, using current time. Error: $e"); return DateTime.now().millisecondsSinceEpoch.toString(); } } - // --- END: NEW HELPER METHOD --- - // Adapted Submission Logic for Investigative Future> submitData({ - required RiverInvesManualSamplingData data, // Updated model type + required RiverInvesManualSamplingData data, required List>? appSettings, required AuthProvider authProvider, String? logDirectory, }) async { - // *** MODIFIED: Module name changed *** const String moduleName = 'river_investigative'; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // Generate the unique timestamp ID and assign it immediately. 
final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime); - data.reportId = timestampId; // This is the primary ID now. - // --- END: MODIFIED TO USE TIMESTAMP ID --- + data.reportId = timestampId; final connectivityResult = await Connectivity().checkConnectivity(); bool isOnline = !connectivityResult.contains(ConnectivityResult.none); bool isOfflineSession = authProvider.isLoggedIn && (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false); - // Auto-relogin logic remains the same if (isOnline && isOfflineSession) { - debugPrint("River Investigative submission online during offline session. Attempting auto-relogin..."); // Log context update + debugPrint("River Investigative submission online during offline session. Attempting auto-relogin..."); try { final bool transitionSuccess = await authProvider.checkAndTransitionToOnlineSession(); if (transitionSuccess) { - isOfflineSession = false; // Successfully transitioned to online + isOfflineSession = false; } else { - isOnline = false; // Auto-relogin failed, treat as offline + isOnline = false; } } on SessionExpiredException catch (_) { debugPrint("Session expired during auto-relogin check. 
Treating as offline."); @@ -228,9 +205,8 @@ class RiverInvestigativeSamplingService { // Renamed class } } - // Branch based on connectivity and session status if (isOnline && !isOfflineSession) { - debugPrint("Proceeding with direct ONLINE River Investigative submission..."); // Log context update + debugPrint("Proceeding with direct ONLINE River Investigative submission..."); return await _performOnlineSubmission( data: data, appSettings: appSettings, @@ -239,26 +215,25 @@ class RiverInvestigativeSamplingService { // Renamed class logDirectory: logDirectory, ); } else { - debugPrint("Proceeding with OFFLINE River Investigative queuing mechanism..."); // Log context update + debugPrint("Proceeding with OFFLINE River Investigative queuing mechanism..."); return await _performOfflineQueuing( data: data, moduleName: moduleName, - logDirectory: logDirectory, // Pass for potential update + logDirectory: logDirectory, ); } } Future> _performOnlineSubmission({ - required RiverInvesManualSamplingData data, // Updated model type + required RiverInvesManualSamplingData data, required List>? appSettings, - required String moduleName, // Passed in as 'river_investigative' + required String moduleName, required AuthProvider authProvider, String? logDirectory, }) async { final serverName = (await _serverConfigService.getActiveApiConfig())?['config_name'] as String? ?? 'Default'; - // Get image files using the Investigative model's method final imageFilesWithNulls = data.toApiImageFiles(); - imageFilesWithNulls.removeWhere((key, value) => value == null); // Remove nulls + imageFilesWithNulls.removeWhere((key, value) => value == null); final Map finalImageFiles = imageFilesWithNulls.cast(); bool anyApiSuccess = false; @@ -268,372 +243,305 @@ class RiverInvestigativeSamplingService { // Renamed class String finalStatus = ''; bool isSessionKnownToBeExpired = false; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - String? 
apiRecordId; // Will hold the DB ID (e.g., 102) from the server - // data.reportId already contains the timestamp ID - // --- END: MODIFIED TO USE TIMESTAMP ID --- + String? apiRecordId; - try { - // 1. Submit Form Data (using Investigative endpoint and data) - apiDataResult = await _submissionApiService.submitPost( - moduleName: moduleName, // 'river_investigative' - // *** MODIFIED: API Endpoint *** - endpoint: 'river/investigative/sample', // Assumed endpoint for investigative data - body: data.toApiFormData(), // Use Investigative model's method - ); + // 1. Check module preferences for API + final pref = await _userPreferencesService.getModulePreference(moduleName); + bool isApiEnabled = pref?['is_api_enabled'] ?? true; + bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true; - if (apiDataResult['success'] == true) { - anyApiSuccess = true; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // *** MODIFIED: Extract report ID using assumed key *** - apiRecordId = apiDataResult['data']?['r_inv_id']?.toString(); // Assumed key for investigative ID - // --- END: MODIFIED TO USE TIMESTAMP ID --- + if (isApiEnabled) { + try { + // 1. Submit Form Data + apiDataResult = await _submissionApiService.submitPost( + moduleName: moduleName, + endpoint: 'river/investigative/sample', + body: data.toApiFormData(), + ); - if (apiRecordId != null) { - if (finalImageFiles.isNotEmpty) { - // 2. 
Submit Images (using Investigative endpoint) - apiImageResult = await _submissionApiService.submitMultipart( - moduleName: moduleName, // 'river_investigative' - // *** MODIFIED: API Endpoint *** - endpoint: 'river/investigative/images', // Assumed endpoint for investigative images - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // *** MODIFIED: Field key for ID *** - fields: {'r_inv_id': apiRecordId}, // Use assumed investigative ID key - // --- END: MODIFIED TO USE TIMESTAMP ID --- - files: finalImageFiles, - ); - if (apiImageResult['success'] != true) { - // If image upload fails after data success, mark API part as failed overall for simplicity, or handle partially. - anyApiSuccess = false; // Treat as overall API failure if images fail + if (apiDataResult['success'] == true) { + anyApiSuccess = true; + apiRecordId = apiDataResult['data']?['r_inves_id']?.toString(); + + if (apiRecordId != null) { + if (finalImageFiles.isNotEmpty) { + // 2. Submit Images + apiImageResult = await _submissionApiService.submitMultipart( + moduleName: moduleName, + endpoint: 'river/investigative/images', + fields: {'r_inves_id': apiRecordId}, + files: finalImageFiles, + ); + if (apiImageResult['success'] != true) { + anyApiSuccess = false; + } } + } else { + anyApiSuccess = false; + apiDataResult['success'] = false; + apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; } - // If no images, data submission success is enough - } else { - // API succeeded but didn't return an ID - treat as failure - anyApiSuccess = false; - apiDataResult['success'] = false; // Mark as failed - // --- START: MODIFIED TO USE TIMESTAMP ID --- - apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; - // --- END: MODIFIED TO USE TIMESTAMP ID --- + } + } on SessionExpiredException catch (_) { + debugPrint("Online River Investigative submission failed due to session expiry that could not be refreshed."); + 
isSessionKnownToBeExpired = true; + anyApiSuccess = false; + apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'}; + // Manually queue API calls + await _retryService.addApiToQueue(endpoint: 'river/investigative/sample', method: 'POST', body: data.toApiFormData()); + + if (finalImageFiles.isNotEmpty && apiRecordId != null) { + await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {'r_inves_id': apiRecordId}, files: finalImageFiles); + } else if (finalImageFiles.isNotEmpty && apiRecordId == null) { + debugPrint("Queueing investigative images without report ID due to session expiry during data submission."); + await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {}, files: finalImageFiles); } } - // If apiDataResult['success'] is false initially, SubmissionApiService queued it. - - } on SessionExpiredException catch (_) { - debugPrint("Online River Investigative submission failed due to session expiry that could not be refreshed."); // Log context update - isSessionKnownToBeExpired = true; - anyApiSuccess = false; - apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. 
API submission queued.'}; - // Manually queue API calls - // *** MODIFIED: Use Investigative endpoints for queueing *** - await _retryService.addApiToQueue(endpoint: 'river/investigative/sample', method: 'POST', body: data.toApiFormData()); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - if (finalImageFiles.isNotEmpty && apiRecordId != null) { - // Queue images only if we might have gotten an ID before expiry - await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {'r_inv_id': apiRecordId}, files: finalImageFiles); - } else if (finalImageFiles.isNotEmpty && apiRecordId == null) { - // --- END: MODIFIED TO USE TIMESTAMP ID --- - // If data call failed before getting ID, queue images without ID - might need manual linking later or separate retry logic - debugPrint("Queueing investigative images without report ID due to session expiry during data submission."); - // How to handle this depends on backend capabilities or manual intervention needs. - // Option: Queue a complex task instead? For now, queueing individually. - await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {}, files: finalImageFiles); // Queue images without ID - } + } else { + debugPrint("API submission disabled for $moduleName by user preference."); + apiDataResult = {'success': true, 'message': 'API submission disabled by user.'}; + anyApiSuccess = true; // Treated as success since it was intentional } - // 3. Submit FTP Files (Logic remains similar, uses specific JSON methods) + + // 3. Submit FTP Files Map ftpResults = {'statuses': []}; bool anyFtpSuccess = false; - if (isSessionKnownToBeExpired) { - debugPrint("Skipping FTP attempt for River Investigative due to known expired session. 
Manually queuing FTP tasks."); // Log context update - // --- START: MODIFIED TO USE TIMESTAMP ID --- - final baseFileNameForQueue = _generateBaseFileName(data); // Use helper - // --- END: MODIFIED TO USE TIMESTAMP ID --- + // --- START FIX: Check if FTP is enabled AND if it was already successful --- + // 'L4' status means API Failed but FTP Succeeded. If re-submitting an L4 record, we skip FTP. + // 'S4' means everything succeeded. + bool previousFtpSuccess = data.submissionStatus == 'L4' || data.submissionStatus == 'S4'; - // --- START FIX: Add ftpConfigId when queuing --- (Copied from In-Situ, ensure DB structure matches) - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; + if (!isFtpEnabled) { + debugPrint("FTP submission disabled for $moduleName by user preference. Skipping FTP."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'FTP disabled by user preference.', 'success': true}]}; + anyFtpSuccess = true; + } else if (previousFtpSuccess) { + debugPrint("FTP submission skipped because it was already successful (Status: ${data.submissionStatus})."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'Already successful in previous attempt.', 'success': true}]}; + anyFtpSuccess = true; + } else { + // Proceed with FTP logic only if enabled AND not previously successful + if (isSessionKnownToBeExpired) { + debugPrint("Skipping FTP attempt for River Investigative due to known expired session. Manually queuing FTP tasks."); + final baseFileNameForQueue = _generateBaseFileName(data); + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? 
[]; - final dataZip = await _zippingService.createDataZip( - jsonDataMap: { // Use specific JSON structures for River Investigative FTP - 'db.json': data.toDbJson(), // Use Investigative model's method - 'river_inves_basic_form.json': data.toBasicFormJson(), // Use Investigative model's method - 'river_inves_reading.json': data.toReadingJson(), // Use Investigative model's method - 'river_inves_manual_info.json': data.toManualInfoJson(), // Use Investigative model's method - }, - baseFileName: baseFileNameForQueue, - destinationDir: null, // Save to temp dir - ); - if (dataZip != null) { - // Queue for each config separately - for (final config in ftpConfigs) { - final configId = config['ftp_config_id']; - if (configId != null) { - await _retryService.addFtpToQueue( - localFilePath: dataZip.path, - remotePath: '/${p.basename(dataZip.path)}', // Standard remote path - ftpConfigId: configId // Provide the specific config ID - ); - } - } - } - - if (finalImageFiles.isNotEmpty) { - // Use existing queue logic for fallback (no renaming complexity here to be safe) - final Map retryImages = {}; - final String dateStr = (data.samplingDate ?? '').replaceAll('-', ''); - final String timeStr = (data.samplingTime ?? '').replaceAll(':', ''); - final String zipImageTimestamp = "$dateStr$timeStr"; - - void addRetryMap(File? 
file, String prefix) { - if(file != null) retryImages['${prefix}_$zipImageTimestamp.jpg'] = file; - } - addRetryMap(data.backgroundStationImage, 'background'); - addRetryMap(data.upstreamRiverImage, 'upstream'); - addRetryMap(data.downstreamRiverImage, 'downstream'); - addRetryMap(data.sampleTurbidityImage, 'sample_turbidity'); - addRetryMap(data.optionalImage1, 'optional_1'); - addRetryMap(data.optionalImage2, 'optional_2'); - addRetryMap(data.optionalImage3, 'optional_3'); - addRetryMap(data.optionalImage4, 'optional_4'); - - final retryImageZip = await _zippingService.createRenamedImageZip( - imageFiles: retryImages, + final dataZip = await _zippingService.createDataZip( + jsonDataMap: { + 'db.json': data.toDbJson(), + 'river_inves_basic_form.json': data.toBasicFormJson(), + 'river_inves_reading.json': data.toReadingJson(), + 'river_inves_manual_info.json': data.toManualInfoJson(), + }, baseFileName: baseFileNameForQueue, destinationDir: null, ); - - if (retryImageZip != null) { - // Queue for each config separately + if (dataZip != null) { for (final config in ftpConfigs) { final configId = config['ftp_config_id']; if (configId != null) { await _retryService.addFtpToQueue( - localFilePath: retryImageZip.path, - remotePath: '/${p.basename(retryImageZip.path)}', // Standard remote path - ftpConfigId: configId // Provide the specific config ID + localFilePath: dataZip.path, + remotePath: '/${p.basename(dataZip.path)}', + ftpConfigId: configId ); } } } - } - // --- END FIX --- - ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; - anyFtpSuccess = false; // Mark FTP as unsuccessful for overall status determination - } else { - // Proceed with FTP attempt if session is okay - try { - ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); // Call helper - // Determine success based on statuses (excluding 'Not Configured') - anyFtpSuccess = 
!(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); - } catch (e) { - debugPrint("Unexpected River Investigative FTP submission error: $e"); // Log context update - anyFtpSuccess = false; // Mark FTP as failed on error - ftpResults = {'statuses': [{'status': 'Error', 'message': 'FTP process failed: $e.', 'success': false}]}; // Provide error status + + if (finalImageFiles.isNotEmpty) { + final Map retryImages = {}; + final String dateStr = (data.samplingDate ?? '').replaceAll('-', ''); + final String timeStr = (data.samplingTime ?? '').replaceAll(':', ''); + final String zipImageTimestamp = "$dateStr$timeStr"; + + void addRetryMap(File? file, String prefix) { + if(file != null) retryImages['${prefix}_$zipImageTimestamp.jpg'] = file; + } + addRetryMap(data.backgroundStationImage, 'background'); + addRetryMap(data.upstreamRiverImage, 'upstream'); + addRetryMap(data.downstreamRiverImage, 'downstream'); + addRetryMap(data.sampleTurbidityImage, 'sample_turbidity'); + addRetryMap(data.optionalImage1, 'optional_1'); + addRetryMap(data.optionalImage2, 'optional_2'); + addRetryMap(data.optionalImage3, 'optional_3'); + addRetryMap(data.optionalImage4, 'optional_4'); + + final retryImageZip = await _zippingService.createRenamedImageZip( + imageFiles: retryImages, + baseFileName: baseFileNameForQueue, + destinationDir: null, + ); + + if (retryImageZip != null) { + for (final config in ftpConfigs) { + final configId = config['ftp_config_id']; + if (configId != null) { + await _retryService.addFtpToQueue( + localFilePath: retryImageZip.path, + remotePath: '/${p.basename(retryImageZip.path)}', + ftpConfigId: configId + ); + } + } + } + } + ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; + anyFtpSuccess = false; + } else { + try { + ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); + 
anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); + } catch (e) { + debugPrint("Unexpected River Investigative FTP submission error: $e"); + anyFtpSuccess = false; + ftpResults = {'statuses': [{'status': 'Error', 'message': 'FTP process failed: $e.', 'success': false}]}; + } } } + // --- END FIX --- - // 4. Determine Final Status (Logic remains the same) + // 4. Determine Final Status final bool overallSuccess = anyApiSuccess || anyFtpSuccess; if (anyApiSuccess && anyFtpSuccess) { finalMessage = 'Data submitted successfully to all destinations.'; - finalStatus = 'S4'; // API OK, FTP OK + finalStatus = 'S4'; } else if (anyApiSuccess && !anyFtpSuccess) { finalMessage = 'Data sent to API, but some FTP uploads failed or were queued.'; - finalStatus = 'S3'; // API OK, FTP Failed/Queued + finalStatus = 'S3'; } else if (!anyApiSuccess && anyFtpSuccess) { finalMessage = 'API submission failed and was queued, but files were sent to FTP successfully.'; - finalStatus = 'L4'; // API Failed/Queued, FTP OK - } else { // Neither API nor FTP fully succeeded without queueing/errors + finalStatus = 'L4'; + } else { finalMessage = apiDataResult['message'] ?? 'All submission attempts failed and have been queued for retry.'; - finalStatus = 'L1'; // API Failed/Queued, FTP Failed/Queued + finalStatus = 'L1'; } - // 5. Log Locally (using Investigative log method) + // 5. Log Locally await _logAndSave( data: data, status: finalStatus, message: finalMessage, - apiResults: [apiDataResult, apiImageResult].where((r) => r.isNotEmpty).toList(), // Filter out empty results + apiResults: [apiDataResult, apiImageResult].where((r) => r.isNotEmpty).toList(), ftpStatuses: ftpResults['statuses'] ?? 
[], serverName: serverName, - // --- START: MODIFIED TO USE TIMESTAMP ID --- - apiRecordId: apiRecordId, // Pass the server ID - // --- END: MODIFIED TO USE TIMESTAMP ID --- + apiRecordId: apiRecordId, logDirectory: logDirectory, ); - // 6. Send Alert (using Investigative alert method) - if (overallSuccess) { // Send alert only if at least one part (API or FTP) succeeded without errors/queueing immediately + // 6. Send Alert + if (overallSuccess) { _handleSuccessAlert(data, appSettings, isDataOnly: finalImageFiles.isEmpty, isSessionExpired: isSessionKnownToBeExpired); } - // Return consistent result format return { 'status': finalStatus, - 'success': overallSuccess, // Reflects if *any* part succeeded now + 'success': overallSuccess, 'message': finalMessage, - // --- START: MODIFIED TO USE TIMESTAMP ID --- - 'reportId': data.reportId // This is now the timestamp ID - // --- END: MODIFIED TO USE TIMESTAMP ID --- + 'reportId': data.reportId }; } - /// Handles queuing the submission data when the device is offline for Investigative. Future> _performOfflineQueuing({ - required RiverInvesManualSamplingData data, // Updated model type - required String moduleName, // Passed in as 'river_investigative' - String? logDirectory, // Added for potential update + required RiverInvesManualSamplingData data, + required String moduleName, + String? logDirectory, }) async { final serverConfig = await _serverConfigService.getActiveApiConfig(); final serverName = serverConfig?['config_name'] as String? ?? 'Default'; - data.submissionStatus = 'Queued'; // Tentative status, will be L1 after saving + data.submissionStatus = 'Queued'; data.submissionMessage = 'Submission queued for later retry.'; - String? savedLogPath = logDirectory; // Use existing path if provided for an update + String? 
savedLogPath = logDirectory; - // Save/Update local log first using the specific Investigative save method if (savedLogPath != null && savedLogPath.isNotEmpty) { - // *** MODIFIED: Use correct update method *** - await _localStorageService.updateRiverInvestigativeLog(data.toMap()..['logDirectory'] = savedLogPath); // Add path for update method - debugPrint("Updated existing River Investigative log for queuing: $savedLogPath"); // Log context update + await _localStorageService.updateRiverInvestigativeLog(data.toMap()..['logDirectory'] = savedLogPath); + debugPrint("Updated existing River Investigative log for queuing: $savedLogPath"); } else { - // *** MODIFIED: Use correct save method *** savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data, serverName: serverName); - debugPrint("Saved new River Investigative log for queuing: $savedLogPath"); // Log context update + debugPrint("Saved new River Investigative log for queuing: $savedLogPath"); } if (savedLogPath == null) { - // If saving the log itself failed - const message = "Failed to save River Investigative submission to local device storage."; // Log context update - // Log failure to central DB log if possible - // --- START: MODIFIED TO USE TIMESTAMP ID --- + const message = "Failed to save River Investigative submission to local device storage."; await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: logDirectory); - // --- END: MODIFIED TO USE TIMESTAMP ID --- return {'status': 'Error', 'success': false, 'message': message}; } - // Queue the task for the RetryService - // *** MODIFIED: Use specific task type *** await _retryService.queueTask( - type: 'river_investigative_submission', // Specific type for retry handler + type: 'river_investigative_submission', payload: { - 'module': moduleName, // 'river_investigative' - 'localLogPath': p.join(savedLogPath, 'data.json'), // Point 
to the json file within the saved directory - 'serverConfig': serverConfig, // Pass current server config at time of queueing + 'module': moduleName, + 'localLogPath': p.join(savedLogPath, 'data.json'), + 'serverConfig': serverConfig, }, ); - const successMessage = "Device offline. River Investigative submission has been saved locally and queued for automatic retry when connection is restored."; // Log context update - // Update final status in the data object and potentially update log again, or just log to central DB - data.submissionStatus = 'L1'; // Final queued status + const successMessage = "Device offline. River Investigative submission has been saved locally and queued for automatic retry when connection is restored."; + data.submissionStatus = 'L1'; data.submissionMessage = successMessage; - // Log final queued state to central DB log - // --- START: MODIFIED TO USE TIMESTAMP ID --- - await _logAndSave(data: data, status: 'L1', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: savedLogPath); // Ensure log reflects final state - // --- END: MODIFIED TO USE TIMESTAMP ID --- + await _logAndSave(data: data, status: 'L1', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: savedLogPath); - return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': data.reportId}; // Return timestamp ID + return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': data.reportId}; } - // --- START: NEW HELPER METHOD (for timestamp ID) --- - /// Generates a unique timestamp ID from the sampling date and time. - // Note: This function was duplicated. The duplicate has been removed. - // The first occurrence of this function is kept, even though the error message pointed to it. - // Keeping this one: - /* - String _generateTimestampId(String? date, String? time) { - final String dateTimeString = "${date ?? 
''} ${time ?? ''}"; - try { - // Time format from model is HH:mm - final DateTime samplingDateTime = DateFormat('yyyy-MM-dd HH:mm').parse(dateTimeString); - return samplingDateTime.millisecondsSinceEpoch.toString(); - } catch (e) { - // Fallback: if parsing fails, use the current time in milliseconds - debugPrint("Could not parse '$dateTimeString' for timestamp ID, using current time. Error: $e"); - return DateTime.now().millisecondsSinceEpoch.toString(); - } - } - */ - // --- END: NEW HELPER METHOD --- - - // --- START: MODIFIED _generateBaseFileName --- - /// Helper to generate the base filename for ZIP files (Investigative). - String _generateBaseFileName(RiverInvesManualSamplingData data) { // Updated model type - // Use the determined station code helper + String _generateBaseFileName(RiverInvesManualSamplingData data) { final stationCode = data.getDeterminedStationCode() ?? 'UNKNOWN'; - - // We now always use data.reportId, which we set as the timestamp. if (data.reportId == null || data.reportId!.isEmpty) { - // This is a safety fallback, but should not happen if submitData is used. debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp."); return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}'; } - return "${stationCode}_${data.reportId}"; // Consistent format + return "${stationCode}_${data.reportId}"; } - // --- END: MODIFIED _generateBaseFileName --- - /// Generates data and image ZIP files and uploads them using SubmissionFtpService (Investigative). Future> _generateAndUploadFtpFiles(RiverInvesManualSamplingData data, Map imageFiles, String serverName, String moduleName) async { - // 1. GENERATE TIMESTAMP FOR IMAGE RENAMING - // e.g., "2025-09-30" and "14:34:19" -> "20250930143419" final String dateStr = (data.samplingDate ?? '').replaceAll('-', ''); final String timeStr = (data.samplingTime ?? '').replaceAll(':', ''); final String zipImageTimestamp = "$dateStr$timeStr"; - // 2. 
USE ORIGINAL BASE FILENAME (Report ID / Milliseconds) for Folder/Zip - final baseFileName = _generateBaseFileName(data); // Use helper - - // 3. SETUP DIRECTORIES - final Directory? logDirectory = await _localStorageService.getRiverInvestigativeBaseDir(serverName: serverName); // NEW GETTER + final baseFileName = _generateBaseFileName(data); + final Directory? logDirectory = await _localStorageService.getRiverInvestigativeBaseDir(serverName: serverName); final Directory? localSubmissionDir = logDirectory != null ? Directory(p.join(logDirectory.path, baseFileName)) : null; if (localSubmissionDir != null && !await localSubmissionDir.exists()) { - await localSubmissionDir.create(recursive: true); // Create if doesn't exist + await localSubmissionDir.create(recursive: true); } - // 4. CREATE DATA ZIP final dataZip = await _zippingService.createDataZip( - // --- START FIX: Include all four JSON files --- jsonDataMap: { - // *** MODIFIED: Use Investigative model's JSON methods and filenames *** - 'db.json': data.toDbJson(), // Main data structure + 'db.json': data.toDbJson(), 'river_inves_basic_form.json': data.toBasicFormJson(), 'river_inves_reading.json': data.toReadingJson(), 'river_inves_manual_info.json': data.toManualInfoJson(), }, - // --- END FIX --- baseFileName: baseFileName, - destinationDir: localSubmissionDir, // Save ZIP in the specific log folder + destinationDir: localSubmissionDir, ); - Map ftpDataResult = {'success': true, 'statuses': []}; // Default success if no file + Map ftpDataResult = {'success': true, 'statuses': []}; if (dataZip != null) { ftpDataResult = await _submissionFtpService.submit( - moduleName: moduleName, // 'river_investigative' + moduleName: moduleName, fileToUpload: dataZip, - remotePath: '/${p.basename(dataZip.path)}' // Standard remote path + remotePath: '/${p.basename(dataZip.path)}' ); } - // 5. 
CREATE IMAGE ZIP (RENAMING LOGIC) Map ftpImageResult = {'success': true, 'statuses': []}; - - // Create mapping: "New Name Inside Zip" -> "Original File on Phone" final Map imagesForZip = {}; void mapImage(File? file, String prefix) { if (file != null && file.existsSync()) { - // Rename inside zip: prefix_20250930143419.jpg imagesForZip['${prefix}_$zipImageTimestamp.jpg'] = file; } } - // Map images (Investigative model uses same names as others) mapImage(data.backgroundStationImage, 'background'); mapImage(data.upstreamRiverImage, 'upstream'); mapImage(data.downstreamRiverImage, 'downstream'); @@ -644,152 +552,121 @@ class RiverInvestigativeSamplingService { // Renamed class mapImage(data.optionalImage4, 'optional_4'); if (imagesForZip.isNotEmpty) { - // *** MODIFICATION: Call the NEW renaming function *** final imageZip = await _zippingService.createRenamedImageZip( imageFiles: imagesForZip, baseFileName: baseFileName, - destinationDir: localSubmissionDir, // Save ZIP in the specific log folder + destinationDir: localSubmissionDir, ); if (imageZip != null) { ftpImageResult = await _submissionFtpService.submit( - moduleName: moduleName, // 'river_investigative' + moduleName: moduleName, fileToUpload: imageZip, - remotePath: '/${p.basename(imageZip.path)}' // Standard remote path + remotePath: '/${p.basename(imageZip.path)}' ); } } - // Combine statuses from both uploads return { 'statuses': >[ - ...(ftpDataResult['statuses'] as List? ?? []), // Use null-aware spread - ...(ftpImageResult['statuses'] as List? ?? []), // Use null-aware spread + ...(ftpDataResult['statuses'] as List? ?? []), + ...(ftpImageResult['statuses'] as List? ?? []), ], }; } - /// Saves or updates the local log file and saves a record to the central DB log (Investigative). 
Future _logAndSave({ - required RiverInvesManualSamplingData data, // Updated model type + required RiverInvesManualSamplingData data, required String status, required String message, required List> apiResults, required List> ftpStatuses, required String serverName, - // --- START: MODIFIED TO USE TIMESTAMP ID --- - String? apiRecordId, // The server's DB ID (e.g., 102) - // --- END: MODIFIED TO USE TIMESTAMP ID --- - String? logDirectory, // Can be null initially, gets populated on first save + String? apiRecordId, + String? logDirectory, }) async { data.submissionStatus = status; data.submissionMessage = message; - final baseFileName = _generateBaseFileName(data); // Use helper for consistent naming + final baseFileName = _generateBaseFileName(data); - // Prepare log data map using toMap() final Map logMapData = data.toMap(); - // Add submission metadata that might not be in toMap() or needs overriding logMapData['submissionStatus'] = status; logMapData['submissionMessage'] = message; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // data.reportId (the timestamp) is already in the map from toMap() - logMapData['apiRecordId'] = apiRecordId; // Add the server DB ID - // --- END: MODIFIED TO USE TIMESTAMP ID --- + logMapData['apiRecordId'] = apiRecordId; logMapData['serverConfigName'] = serverName; - // Store API/FTP results as JSON strings - logMapData['api_status'] = jsonEncode(apiResults); // Ensure apiResults is a list - logMapData['ftp_status'] = jsonEncode(ftpStatuses); // Ensure ftpStatuses is a list + logMapData['api_status'] = jsonEncode(apiResults); + logMapData['ftp_status'] = jsonEncode(ftpStatuses); String? 
savedLogPath = logDirectory; - // Save or Update local log file (data.json) if (savedLogPath != null && savedLogPath.isNotEmpty) { - // Update existing log - logMapData['logDirectory'] = savedLogPath; // Ensure logDirectory path is in the map for update method - // *** MODIFIED: Use correct update method *** - await _localStorageService.updateRiverInvestigativeLog(logMapData); // NEW UPDATE METHOD + logMapData['logDirectory'] = savedLogPath; + await _localStorageService.updateRiverInvestigativeLog(logMapData); } else { - // Save new log and get the path - // *** MODIFIED: Use correct save method *** - savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data, serverName: serverName); // NEW SAVE METHOD + savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data, serverName: serverName); if (savedLogPath != null) { - logMapData['logDirectory'] = savedLogPath; // Add the new path for central log + logMapData['logDirectory'] = savedLogPath; } else { debugPrint("Failed to save River Investigative log locally, central DB log might be incomplete."); - // Handle case where local save failed? Maybe skip central log or log with error? } } - - // Save record to central DB log (submission_log table) final imagePaths = data.toApiImageFiles().values.whereType().map((f) => f.path).toList(); final centralLogData = { - // --- START: MODIFIED TO USE TIMESTAMP ID --- - 'submission_id': data.reportId ?? baseFileName, // Use timestamp ID - // *** MODIFIED: Module and Type *** - 'module': 'river', // Keep main module as 'river' - 'type': 'Investigative', // Specific type + 'submission_id': data.reportId ?? 
baseFileName, + 'module': 'river', + 'type': 'Investigative', 'status': status, 'message': message, - 'report_id': apiRecordId, // Use server DB ID - // --- END: MODIFIED TO USE TIMESTAMP ID --- + 'report_id': apiRecordId, 'created_at': DateTime.now().toIso8601String(), - 'form_data': jsonEncode(logMapData), // Log the comprehensive map including paths and status - 'image_data': jsonEncode(imagePaths), // Log original image paths used for submission attempt + 'form_data': jsonEncode(logMapData), + 'image_data': jsonEncode(imagePaths), 'server_name': serverName, - 'api_status': jsonEncode(apiResults), // Log API results - 'ftp_status': jsonEncode(ftpStatuses), // Log FTP results + 'api_status': jsonEncode(apiResults), + 'ftp_status': jsonEncode(ftpStatuses), }; try { await _dbHelper.saveSubmissionLog(centralLogData); } catch (e) { - debugPrint("Error saving River Investigative submission log to DB: $e"); // Log context update + debugPrint("Error saving River Investigative submission log to DB: $e"); } } - - /// Handles sending or queuing the Telegram alert for River Investigative submissions. - Future _handleSuccessAlert(RiverInvesManualSamplingData data, List>? appSettings, {required bool isDataOnly, bool isSessionExpired = false}) async { // Updated model type + Future _handleSuccessAlert(RiverInvesManualSamplingData data, List>? 
appSettings, {required bool isDataOnly, bool isSessionExpired = false}) async { try { - // --- FIX: Correct function name to the defined helper method --- - final message = await _generateSuccessAlertMessage(data, isDataOnly: isDataOnly); // Call specific helper - // --- END FIX --- - // *** MODIFIED: Telegram key *** - final alertKey = 'river_investigative'; // Specific key for this module + final message = await _generateSuccessAlertMessage(data, isDataOnly: isDataOnly); + final alertKey = 'river_investigative'; if (isSessionExpired) { - debugPrint("Session is expired; queuing River Investigative Telegram alert directly for $alertKey."); // Log context update + debugPrint("Session is expired; queuing River Investigative Telegram alert directly for $alertKey."); await _telegramService.queueMessage(alertKey, message, appSettings); } else { final bool wasSent = await _telegramService.sendAlertImmediately(alertKey, message, appSettings); if (!wasSent) { - // Fallback to queueing if immediate send fails await _telegramService.queueMessage(alertKey, message, appSettings); } } } catch (e) { - debugPrint("Failed to handle River Investigative Telegram alert: $e"); // Log context update + debugPrint("Failed to handle River Investigative Telegram alert: $e"); } } - /// Generates the specific Telegram alert message content for River Investigative. - Future _generateSuccessAlertMessage(RiverInvesManualSamplingData data, {required bool isDataOnly}) async { // Updated model type + Future _generateSuccessAlertMessage(RiverInvesManualSamplingData data, {required bool isDataOnly}) async { final submissionType = isDataOnly ? "(Data Only)" : "(Data & Images)"; - // Use helpers to get determined names/codes - final stationName = data.getDeterminedRiverName() ?? data.getDeterminedStationName() ?? 'N/A'; // Combine river/station name + final stationName = data.getDeterminedRiverName() ?? data.getDeterminedStationName() ?? 'N/A'; final stationCode = data.getDeterminedStationCode() ?? 
'N/A'; final submissionDate = data.samplingDate ?? DateFormat('yyyy-MM-dd').format(DateTime.now()); final submitter = data.firstSamplerName ?? 'N/A'; final sondeID = data.sondeId ?? 'N/A'; final distanceKm = data.distanceDifferenceInKm ?? 0; final distanceMeters = (distanceKm * 1000).toStringAsFixed(0); - final distanceRemarks = data.distanceDifferenceRemarks ?? ''; // Default to empty string + final distanceRemarks = data.distanceDifferenceRemarks ?? ''; final buffer = StringBuffer() - ..writeln('✅ *River Investigative Sample ${submissionType} Submitted:*') // Updated title + ..writeln('✅ *River Investigative Sample ${submissionType} Submitted:*') ..writeln(); - // Adapt station info based on type buffer.writeln('*Station Type:* ${data.stationTypeSelection ?? 'N/A'}'); if (data.stationTypeSelection == 'New Location') { buffer.writeln('*New Location Name:* ${data.newStationName ?? 'N/A'}'); @@ -808,7 +685,6 @@ class RiverInvestigativeSamplingService { // Renamed class ..writeln('*Sonde ID:* $sondeID') ..writeln('*Status of Submission:* Successful'); - // Include distance warning only if NOT a new location and distance > 50m if (data.stationTypeSelection != 'New Location' && (distanceKm * 1000 > 50 || distanceRemarks.isNotEmpty)) { buffer ..writeln() @@ -819,8 +695,7 @@ class RiverInvestigativeSamplingService { // Renamed class } } - // Add parameter limit check section (uses the same river limits) - final outOfBoundsAlert = await _getOutOfBoundsAlertSection(data); // Call helper + final outOfBoundsAlert = await _getOutOfBoundsAlertSection(data); if (outOfBoundsAlert.isNotEmpty) { buffer.write(outOfBoundsAlert); } @@ -828,21 +703,16 @@ class RiverInvestigativeSamplingService { // Renamed class return buffer.toString(); } - /// Helper to generate the parameter limit alert section for Telegram (River Investigative). 
- Future _getOutOfBoundsAlertSection(RiverInvesManualSamplingData data) async { // Updated model type - // Define mapping from data model keys to parameter names used in limits table - // This mapping should be consistent with River In-Situ + Future _getOutOfBoundsAlertSection(RiverInvesManualSamplingData data) async { const Map _parameterKeyToLimitName = { 'oxygenConcentration': 'Oxygen Conc', 'oxygenSaturation': 'Oxygen Sat', 'ph': 'pH', 'salinity': 'Salinity', 'electricalConductivity': 'Conductivity', 'temperature': 'Temperature', 'tds': 'TDS', 'turbidity': 'Turbidity', 'ammonia': 'Ammonia', 'batteryVoltage': 'Battery', }; - // Load the same river parameter limits as In-Situ final allLimits = await _dbHelper.loadRiverParameterLimits() ?? []; - if (allLimits.isEmpty) return ""; // No limits defined + if (allLimits.isEmpty) return ""; - // Get current readings from the investigative data model final readings = { 'oxygenConcentration': data.oxygenConcentration, 'oxygenSaturation': data.oxygenSaturation, 'ph': data.ph, 'salinity': data.salinity, 'electricalConductivity': data.electricalConductivity, @@ -852,7 +722,6 @@ class RiverInvestigativeSamplingService { // Renamed class final List outOfBoundsMessages = []; - // Helper to parse limit values (copied from In-Situ) double? 
parseLimitValue(dynamic value) { if (value == null) return null; if (value is num) return value.toDouble(); @@ -860,17 +729,15 @@ class RiverInvestigativeSamplingService { // Renamed class return null; } - // Iterate through readings and check against limits readings.forEach((key, value) { - if (value == null || value == -999.0) return; // Skip missing/default values + if (value == null || value == -999.0) return; final limitName = _parameterKeyToLimitName[key]; - if (limitName == null) return; // Skip if parameter not in mapping + if (limitName == null) return; - // Find the limit data for this parameter final limitData = allLimits.firstWhere( (l) => l['param_parameter_list'] == limitName, - orElse: () => {}, // Return empty map if not found + orElse: () => {}, ); if (limitData.isNotEmpty) { @@ -878,12 +745,10 @@ class RiverInvestigativeSamplingService { // Renamed class final upperLimit = parseLimitValue(limitData['param_upper_limit']); bool isOutOfBounds = false; - // Check bounds if (lowerLimit != null && value < lowerLimit) isOutOfBounds = true; if (upperLimit != null && value > upperLimit) isOutOfBounds = true; if (isOutOfBounds) { - // Format message for Telegram final valueStr = value.toStringAsFixed(5); final lowerStr = lowerLimit?.toStringAsFixed(5) ?? 'N/A'; final upperStr = upperLimit?.toStringAsFixed(5) ?? 
'N/A'; @@ -892,19 +757,17 @@ class RiverInvestigativeSamplingService { // Renamed class } }); - // If no parameters were out of bounds, return empty string if (outOfBoundsMessages.isEmpty) { return ""; } - // Construct the alert section header and messages final buffer = StringBuffer() - ..writeln() // Add spacing + ..writeln() ..writeln('⚠️ *Parameter Limit Alert:*') ..writeln('The following parameters were outside their defined limits:'); - buffer.writeAll(outOfBoundsMessages, '\n'); // Add each message on a new line + buffer.writeAll(outOfBoundsMessages, '\n'); - return buffer.toString(); // --- FIX: Missing return statement was fixed --- + return buffer.toString(); } -} // End of RiverInvestigativeSamplingService class \ No newline at end of file +} \ No newline at end of file diff --git a/lib/services/river_manual_triennial_sampling_service.dart b/lib/services/river_manual_triennial_sampling_service.dart index 2fca90c..18c7b87 100644 --- a/lib/services/river_manual_triennial_sampling_service.dart +++ b/lib/services/river_manual_triennial_sampling_service.dart @@ -33,6 +33,7 @@ import 'submission_ftp_service.dart'; import 'telegram_service.dart'; import 'retry_service.dart'; import 'base_api_service.dart'; // Import for SessionExpiredException +import 'user_preferences_service.dart'; // ADDED class RiverManualTriennialSamplingService { @@ -47,6 +48,7 @@ class RiverManualTriennialSamplingService { final ZippingService _zippingService = ZippingService(); final RetryService _retryService = RetryService(); final TelegramService _telegramService; + final UserPreferencesService _userPreferencesService = UserPreferencesService(); // ADDED final ImagePicker _picker = ImagePicker(); static const platform = MethodChannel('com.example.environment_monitoring_app/usb'); @@ -239,6 +241,10 @@ class RiverManualTriennialSamplingService { required AuthProvider authProvider, String? 
logDirectory, }) async { + // --- START FIX: Capture the status before attempting submission --- + final String? previousStatus = data.submissionStatus; + // --- END FIX --- + final serverName = (await _serverConfigService.getActiveApiConfig())?['config_name'] as String? ?? 'Default'; final imageFilesWithNulls = data.toApiImageFiles(); imageFilesWithNulls.removeWhere((key, value) => value == null); @@ -256,177 +262,204 @@ class RiverManualTriennialSamplingService { // data.reportId already contains the timestamp ID // --- END: MODIFIED TO USE TIMESTAMP ID --- - try { - // 1. Submit Form Data - apiDataResult = await _submissionApiService.submitPost( - moduleName: moduleName, - endpoint: 'river/triennial/sample', // Correct endpoint - body: data.toApiFormData(), - ); + // 1. Check module preferences for API + final pref = await _userPreferencesService.getModulePreference(moduleName); + bool isApiEnabled = pref?['is_api_enabled'] ?? true; + bool isFtpEnabled = pref?['is_ftp_enabled'] ?? true; // --- MODIFIED: Check FTP pref early --- - if (apiDataResult['success'] == true) { - anyApiSuccess = true; - // --- START: MODIFIED TO USE TIMESTAMP ID --- - // Store the server's database ID in a separate variable. - apiRecordId = apiDataResult['data']?['r_tri_id']?.toString(); // Correct ID key - // --- END: MODIFIED TO USE TIMESTAMP ID --- + if (isApiEnabled) { + try { + // 1. Submit Form Data + apiDataResult = await _submissionApiService.submitPost( + moduleName: moduleName, + endpoint: 'river/triennial/sample', // Correct endpoint + body: data.toApiFormData(), + ); - if (apiRecordId != null) { - if (finalImageFiles.isNotEmpty) { - // 2. 
Submit Images - apiImageResult = await _submissionApiService.submitMultipart( - moduleName: moduleName, - endpoint: 'river/triennial/images', // Correct endpoint - // --- START: MODIFIED TO USE TIMESTAMP ID --- - fields: {'r_tri_id': apiRecordId}, // Correct field key - // --- END: MODIFIED TO USE TIMESTAMP ID --- - files: finalImageFiles, - ); - if (apiImageResult['success'] != true) { - anyApiSuccess = false; - } - } - } else { - anyApiSuccess = false; + if (apiDataResult['success'] == true) { + anyApiSuccess = true; // --- START: MODIFIED TO USE TIMESTAMP ID --- - apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // Store the server's database ID in a separate variable. + apiRecordId = apiDataResult['data']?['r_tri_id']?.toString(); // Correct ID key + // --- END: MODIFIED TO USE TIMESTAMP ID --- + + if (apiRecordId != null) { + if (finalImageFiles.isNotEmpty) { + // 2. Submit Images + apiImageResult = await _submissionApiService.submitMultipart( + moduleName: moduleName, + endpoint: 'river/triennial/images', // Correct endpoint + // --- START: MODIFIED TO USE TIMESTAMP ID --- + fields: {'r_tri_id': apiRecordId}, // Correct field key + // --- END: MODIFIED TO USE TIMESTAMP ID --- + files: finalImageFiles, + ); + if (apiImageResult['success'] != true) { + anyApiSuccess = false; + } + } + } else { + anyApiSuccess = false; + // --- START: MODIFIED TO USE TIMESTAMP ID --- + apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.'; + // --- END: MODIFIED TO USE TIMESTAMP ID --- + } + } + // If apiDataResult['success'] is false, SubmissionApiService queued it. + + } on SessionExpiredException catch (_) { + debugPrint("Online submission failed due to session expiry that could not be refreshed."); + isSessionKnownToBeExpired = true; + anyApiSuccess = false; + apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. 
API submission queued.'}; + // Manually queue API calls + await _retryService.addApiToQueue(endpoint: 'river/triennial/sample', method: 'POST', body: data.toApiFormData()); + if (finalImageFiles.isNotEmpty && apiRecordId != null) { + // Also queue images if data call might have partially succeeded before expiry + // --- START: MODIFIED TO USE TIMESTAMP ID --- + await _retryService.addApiToQueue(endpoint: 'river/triennial/images', method: 'POST_MULTIPART', fields: {'r_tri_id': apiRecordId}, files: finalImageFiles); // --- END: MODIFIED TO USE TIMESTAMP ID --- } - } - // If apiDataResult['success'] is false, SubmissionApiService queued it. - } on SessionExpiredException catch (_) { - debugPrint("Online submission failed due to session expiry that could not be refreshed."); - isSessionKnownToBeExpired = true; - anyApiSuccess = false; - apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'}; - // Manually queue API calls - await _retryService.addApiToQueue(endpoint: 'river/triennial/sample', method: 'POST', body: data.toApiFormData()); - if (finalImageFiles.isNotEmpty && apiRecordId != null) { - // Also queue images if data call might have partially succeeded before expiry - // --- START: MODIFIED TO USE TIMESTAMP ID --- - await _retryService.addApiToQueue(endpoint: 'river/triennial/images', method: 'POST_MULTIPART', fields: {'r_tri_id': apiRecordId}, files: finalImageFiles); - // --- END: MODIFIED TO USE TIMESTAMP ID --- - } + // --- START FIX: Queue all four JSON files --- + // Get all potential FTP configs + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; - // --- START FIX: Queue all four JSON files --- - // Get all potential FTP configs - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? 
[]; - - final dataZip = await _zippingService.createDataZip( - jsonDataMap: { // Use specific JSON structures for River Triennial FTP - 'db.json': data.toDbJson(), // Assuming similar structure is needed, adjust if different - 'river_triennial_basic_form.json': data.toBasicFormJson(), - 'river_triennial_reading.json': data.toReadingJson(), - 'river_triennial_manual_info.json': data.toManualInfoJson(), - }, - baseFileName: _generateBaseFileName(data), - destinationDir: null, - ); - if (dataZip != null) { - // Queue for each config separately - for (final config in ftpConfigs) { - final configId = config['ftp_config_id']; - if (configId != null) { - await _retryService.addFtpToQueue( - localFilePath: dataZip.path, - remotePath: '/${p.basename(dataZip.path)}', - ftpConfigId: configId // Provide the specific config ID - ); + final dataZip = await _zippingService.createDataZip( + jsonDataMap: { // Use specific JSON structures for River Triennial FTP + 'db.json': data.toDbJson(), // Assuming similar structure is needed, adjust if different + 'river_triennial_basic_form.json': data.toBasicFormJson(), + 'river_triennial_reading.json': data.toReadingJson(), + 'river_triennial_manual_info.json': data.toManualInfoJson(), + }, + baseFileName: _generateBaseFileName(data), + destinationDir: null, + ); + if (dataZip != null) { + // Queue for each config separately + for (final config in ftpConfigs) { + final configId = config['ftp_config_id']; + if (configId != null) { + await _retryService.addFtpToQueue( + localFilePath: dataZip.path, + remotePath: '/${p.basename(dataZip.path)}', + ftpConfigId: configId // Provide the specific config ID + ); + } } } + // --- END FIX --- } - // --- END FIX --- + } else { + debugPrint("API submission disabled for $moduleName by user preference."); + apiDataResult = {'success': true, 'message': 'API submission disabled by user.'}; + anyApiSuccess = true; // Treated as success since it was intentional } // 3. 
Submit FTP Files Map ftpResults = {'statuses': []}; bool anyFtpSuccess = false; - if (isSessionKnownToBeExpired) { - debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks."); - // --- START: MODIFIED TO USE TIMESTAMP ID --- - final baseFileNameForQueue = _generateBaseFileName(data); // Use helper - // --- END: MODIFIED TO USE TIMESTAMP ID --- + // --- START FIX: Check if FTP is enabled AND if it was already successful --- + // 'L4' status means API Failed but FTP Succeeded. If re-submitting an L4 record, we skip FTP. + // 'S4' means everything succeeded. + bool previousFtpSuccess = data.submissionStatus == 'L4' || data.submissionStatus == 'S4'; - // --- START FIX: Add ftpConfigId when queuing --- - // Get all potential FTP configs - final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; + if (!isFtpEnabled) { + debugPrint("FTP submission disabled for $moduleName by user preference. Skipping FTP."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'FTP disabled by user preference.', 'success': true}]}; + anyFtpSuccess = true; + } else if (previousFtpSuccess) { + debugPrint("FTP submission skipped because it was already successful (Status: ${data.submissionStatus})."); + ftpResults = {'statuses': [{'status': 'Skipped', 'message': 'Already successful in previous attempt.', 'success': true}]}; + anyFtpSuccess = true; + } else { + // Proceed with FTP logic only if enabled AND not previously successful + if (isSessionKnownToBeExpired) { + debugPrint("Skipping FTP attempt due to known expired session. 
Manually queuing FTP tasks."); + // --- START: MODIFIED TO USE TIMESTAMP ID --- + final baseFileNameForQueue = _generateBaseFileName(data); // Use helper + // --- END: MODIFIED TO USE TIMESTAMP ID --- - final dataZip = await _zippingService.createDataZip( - jsonDataMap: { // Use specific JSON structures for River Triennial FTP - 'db.json': data.toDbJson(), // Assuming similar structure is needed, adjust if different - 'river_triennial_basic_form.json': data.toBasicFormJson(), // ADDED - 'river_triennial_reading.json': data.toReadingJson(), // ADDED - 'river_triennial_manual_info.json': data.toManualInfoJson(), // ADDED - }, - baseFileName: baseFileNameForQueue, - destinationDir: null, - ); - if (dataZip != null) { - // Queue for each config separately - for (final config in ftpConfigs) { - final configId = config['ftp_config_id']; - if (configId != null) { - await _retryService.addFtpToQueue( - localFilePath: dataZip.path, - remotePath: '/${p.basename(dataZip.path)}', - ftpConfigId: configId // Provide the specific config ID - ); - } - } - } + // --- START FIX: Add ftpConfigId when queuing --- + // Get all potential FTP configs + final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? []; - if (finalImageFiles.isNotEmpty) { - // Note: For the session expired case, renaming logic would ideally be here too, - // but requires complex reconstruction of the map. Following the previous pattern, - // we attempt to respect the rename if possible. - final Map retryImages = {}; - final String dateStr = (data.samplingDate ?? '').replaceAll('-', ''); - final String timeStr = (data.samplingTime ?? '').replaceAll(':', ''); - final String timestampId = "$dateStr$timeStr"; - - void addRetryMap(File? 
file, String prefix) { - if(file != null) retryImages['${prefix}_$timestampId.jpg'] = file; - } - addRetryMap(data.backgroundStationImage, 'background'); - addRetryMap(data.upstreamRiverImage, 'upstream'); - addRetryMap(data.downstreamRiverImage, 'downstream'); - addRetryMap(data.sampleTurbidityImage, 'sample_turbidity'); - addRetryMap(data.optionalImage1, 'optional_1'); - addRetryMap(data.optionalImage2, 'optional_2'); - addRetryMap(data.optionalImage3, 'optional_3'); - addRetryMap(data.optionalImage4, 'optional_4'); - - final retryImageZip = await _zippingService.createRenamedImageZip( - imageFiles: retryImages, + final dataZip = await _zippingService.createDataZip( + jsonDataMap: { // Use specific JSON structures for River Triennial FTP + 'db.json': data.toDbJson(), // Assuming similar structure is needed, adjust if different + 'river_triennial_basic_form.json': data.toBasicFormJson(), // ADDED + 'river_triennial_reading.json': data.toReadingJson(), // ADDED + 'river_triennial_manual_info.json': data.toManualInfoJson(), // ADDED + }, baseFileName: baseFileNameForQueue, destinationDir: null, ); - if (retryImageZip != null) { + if (dataZip != null) { // Queue for each config separately for (final config in ftpConfigs) { final configId = config['ftp_config_id']; if (configId != null) { await _retryService.addFtpToQueue( - localFilePath: retryImageZip.path, - remotePath: '/${p.basename(retryImageZip.path)}', + localFilePath: dataZip.path, + remotePath: '/${p.basename(dataZip.path)}', ftpConfigId: configId // Provide the specific config ID ); } } } - } - // --- END FIX --- - ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; - anyFtpSuccess = false; - } else { - try { - ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); - anyFtpSuccess = !(ftpResults['statuses'] as List).any((status) => status['success'] == false && status['status'] != 'Not 
Configured'); - } catch (e) { - debugPrint("Unexpected FTP submission error: $e"); + + if (finalImageFiles.isNotEmpty) { + // Note: For the session expired case, renaming logic would ideally be here too, + // but requires complex reconstruction of the map. Following the previous pattern, + // we attempt to respect the rename if possible. + final Map retryImages = {}; + final String dateStr = (data.samplingDate ?? '').replaceAll('-', ''); + final String timeStr = (data.samplingTime ?? '').replaceAll(':', ''); + final String timestampId = "$dateStr$timeStr"; + + void addRetryMap(File? file, String prefix) { + if(file != null) retryImages['${prefix}_$timestampId.jpg'] = file; + } + addRetryMap(data.backgroundStationImage, 'background'); + addRetryMap(data.upstreamRiverImage, 'upstream'); + addRetryMap(data.downstreamRiverImage, 'downstream'); + addRetryMap(data.sampleTurbidityImage, 'sample_turbidity'); + addRetryMap(data.optionalImage1, 'optional_1'); + addRetryMap(data.optionalImage2, 'optional_2'); + addRetryMap(data.optionalImage3, 'optional_3'); + addRetryMap(data.optionalImage4, 'optional_4'); + + final retryImageZip = await _zippingService.createRenamedImageZip( + imageFiles: retryImages, + baseFileName: baseFileNameForQueue, + destinationDir: null, + ); + if (retryImageZip != null) { + // Queue for each config separately + for (final config in ftpConfigs) { + final configId = config['ftp_config_id']; + if (configId != null) { + await _retryService.addFtpToQueue( + localFilePath: retryImageZip.path, + remotePath: '/${p.basename(retryImageZip.path)}', + ftpConfigId: configId // Provide the specific config ID + ); + } + } + } + } + // --- END FIX --- + ftpResults = {'statuses': [{'status': 'Queued', 'message': 'FTP upload queued due to API session issue.', 'success': false}]}; anyFtpSuccess = false; + } else { + try { + ftpResults = await _generateAndUploadFtpFiles(data, finalImageFiles, serverName, moduleName); + anyFtpSuccess = !(ftpResults['statuses'] as 
List).any((status) => status['success'] == false && status['status'] != 'Not Configured'); + } catch (e) { + debugPrint("Unexpected FTP submission error: $e"); + anyFtpSuccess = false; + } } } @@ -462,9 +495,12 @@ class RiverManualTriennialSamplingService { ); // 6. Send Alert - if (overallSuccess) { + // --- START FIX: Check if log was already successful before sending alert --- + final bool wasAlreadySuccessful = previousStatus == 'S4' || previousStatus == 'S3' || previousStatus == 'L4'; + if (overallSuccess && !wasAlreadySuccessful) { _handleSuccessAlert(data, appSettings, isDataOnly: finalImageFiles.isEmpty, isSessionExpired: isSessionKnownToBeExpired); } + // --- END FIX --- // Return consistent format return {