Modify FTP module to follow MMS 1.0 format

This commit is contained in:
ALim Aidrus 2025-11-18 20:40:49 +08:00
parent 629c582aa9
commit c543e82d5b
18 changed files with 729 additions and 300 deletions

View File

@ -344,7 +344,7 @@ class InSituSamplingData {
'remarks_lab': labRemarks,
};
// Remove null values before encoding
data.removeWhere((key, value) => value == null);
//data.removeWhere((key, value) => value == null);
return jsonEncode(data);
}

View File

@ -354,7 +354,7 @@ class RiverInSituSamplingData {
// --- END FIX ---
};
// Remove null values before encoding
data.removeWhere((key, value) => value == null);
//data.removeWhere((key, value) => value == null);
return jsonEncode(data);
}

View File

@ -482,7 +482,7 @@ class RiverInvesManualSamplingData {
'new_river': stationTypeSelection == 'New Location' ? newRiverName : null,
'new_station_name': stationTypeSelection == 'New Location' ? newStationName : null, // Include newStationName
};
data.removeWhere((key, value) => value == null);
//data.removeWhere((key, value) => value == null);
return jsonEncode(data);
}

View File

@ -331,7 +331,7 @@ class RiverManualTriennialSamplingData {
'remarks_event': eventRemarks,
'remarks_lab': labRemarks,
};
data.removeWhere((key, value) => value == null);
//data.removeWhere((key, value) => value == null);
return jsonEncode(data);
}
}

View File

@ -187,7 +187,7 @@ class TarballSamplingData {
'longitude': currentLongitude, // Current location
'sample_id': reportId, // Using reportId if available
};
data.removeWhere((key, value) => value == null);
//data.removeWhere((key, value) => value == null);
return data;
}

View File

@ -53,7 +53,7 @@ class _MarineInvesManualStep1SamplingInfoState extends State<MarineInvesManualSt
List<String> _tarballStatesList = [];
List<Map<String, dynamic>> _stationsForTarballState = [];
final List<String> _samplingTypes = ['Schedule', 'Ad-Hoc', 'Complaint', 'Investigative'];
final List<String> _samplingTypes = ['Investigative'];
@override
void initState() {

View File

@ -40,7 +40,7 @@ class _InSituStep1SamplingInfoState extends State<InSituStep1SamplingInfo> {
List<String> _statesList = [];
List<String> _categoriesForState = [];
List<Map<String, dynamic>> _stationsForCategory = [];
final List<String> _samplingTypes = ['Schedule', 'Ad-Hoc', 'Complaint'];
final List<String> _samplingTypes = ['Schedule'];
@override
void initState() {

View File

@ -64,7 +64,7 @@ class _InSituStep2SiteInfoState extends State<InSituStep2SiteInfo> {
if (file != null) {
setState(() => setImageCallback(file));
} else if (mounted) {
_showSnackBar('Image selection failed. Please ensure all photos are taken in landscape (vertical) mode.', isError: true);
_showSnackBar('Image selection failed. Please ensure all photos are taken in landscape (horizontal) mode.', isError: true);
}
if (mounted) {
@ -143,7 +143,7 @@ class _InSituStep2SiteInfoState extends State<InSituStep2SiteInfo> {
// --- Section: Required Photos ---
Text("Required Photos *", style: Theme.of(context).textTheme.titleLarge),
const Text(
"All photos must be in landscape (vertical) orientation. A watermark will be applied automatically.",
"All photos must be in landscape (horizontal) orientation. A watermark will be applied automatically.",
style: TextStyle(color: Colors.grey)
),
const SizedBox(height: 8),

View File

@ -409,7 +409,7 @@ class _InSituStep3DataCaptureState extends State<InSituStep3DataCapture> with Wi
List<Map<String, dynamic>> _validateParameters(Map<String, double> readings, List<Map<String, dynamic>> limits) {
final List<Map<String, dynamic>> invalidParams = [];
final int? stationId = widget.data.selectedStation?['man_station_id'];
final int? stationId = widget.data.selectedStation?['station_id'];
debugPrint("--- Parameter Validation Start ---");
debugPrint("Selected Station ID: $stationId");

View File

@ -116,16 +116,42 @@ class AirSamplingService {
return File(filePath)..writeAsBytesSync(img.encodeJpg(originalImage));
}
// --- START: MODIFIED FILENAME LOGIC ---
/// Builds a unique, millisecond-precision ID from the sampling [date] and [time].
///
/// The two parts are concatenated and parsed with the 'yyyy-MM-dd HH:mm'
/// pattern (air times are recorded as 'HH:mm'); the resulting instant is
/// returned as epoch milliseconds. When parsing fails (e.g. a missing or
/// malformed part), the current time is used instead so that a usable ID
/// is always produced.
String _generateTimestampId(String? date, String? time) {
  final combined = "${date ?? ''} ${time ?? ''}";
  try {
    final parsed = DateFormat('yyyy-MM-dd HH:mm').parse(combined);
    return parsed.millisecondsSinceEpoch.toString();
  } catch (e) {
    // Never fail the submission over an unparseable timestamp — fall back
    // to wall-clock time and log the problem for later diagnosis.
    debugPrint("Could not parse '$combined' for timestamp ID, using current time. Error: $e");
    return DateTime.now().millisecondsSinceEpoch.toString();
  }
}
// --- END: MODIFIED FILENAME LOGIC ---
// --- REFACTORED submitInstallation method with granular error handling ---
Future<Map<String, dynamic>> submitInstallation(AirInstallationData data, List<Map<String, dynamic>>? appSettings) async {
const String moduleName = 'air_installation';
final activeConfig = await _serverConfigService.getActiveApiConfig();
final serverName = activeConfig?['config_name'] as String? ?? 'Default';
// --- START: MODIFIED FILENAME LOGIC ---
// Generate the unique timestamp ID and base filename FIRST.
final String timestampId = _generateTimestampId(data.installationDate, data.installationTime);
final String stationCode = data.stationID ?? 'UNKNOWN';
final String baseFileName = "${stationCode}_INSTALLATION_${timestampId}";
// Assign this as the primary refID for the log
data.refID = baseFileName;
// --- END: MODIFIED FILENAME LOGIC ---
bool anyApiSuccess = false;
Map<String, dynamic> apiDataResult = {};
Map<String, dynamic> apiImageResult = {};
final imageFiles = data.getImagesForUpload();
String? apiRecordId; // Will hold the DB ID (e.g., 102)
// Step 1: Attempt API Submission
try {
@ -136,13 +162,13 @@ class AirSamplingService {
);
if (apiDataResult['success'] == true) {
final recordId = apiDataResult['data']?['air_man_id']?.toString();
if (recordId != null) {
data.airManId = int.tryParse(recordId);
apiRecordId = apiDataResult['data']?['air_man_id']?.toString();
if (apiRecordId != null) {
data.airManId = int.tryParse(apiRecordId); // Save the DB ID
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName,
endpoint: 'air/manual/installation-images',
fields: {'air_man_id': recordId},
fields: {'air_man_id': apiRecordId}, // Use DB ID for image upload
files: imageFiles,
);
anyApiSuccess = apiImageResult['success'] == true;
@ -170,9 +196,8 @@ class AirSamplingService {
Map<String, dynamic> ftpImageResult = {'statuses': []};
bool anyFtpSuccess = false;
try {
final stationCode = data.stationID ?? 'UNKNOWN';
final samplingDateTime = "${data.installationDate}_${data.installationTime}".replaceAll(':', '-').replaceAll(' ', '_');
final baseFileName = "${stationCode}_INSTALLATION_${samplingDateTime}";
// --- FILENAME LOGIC MOVED ---
// baseFileName is already generated above.
final dataZip = await _zippingService.createDataZip(jsonDataMap: {'db.json': jsonEncode(data.toDbJson())}, baseFileName: baseFileName);
if (dataZip != null) {
@ -209,7 +234,20 @@ class AirSamplingService {
finalMessage = 'Both API and FTP submissions failed and were queued.';
}
await _logAndSave(data: data, status: finalStatus, message: finalMessage, apiResults: [apiDataResult, apiImageResult], ftpStatuses: [...ftpDataResult['statuses'], ...ftpImageResult['statuses']], serverName: serverName, type: 'Installation');
// --- START: MODIFIED LOGGING ---
await _logAndSave(
data: data,
status: finalStatus,
message: finalMessage,
apiResults: [apiDataResult, apiImageResult],
ftpStatuses: [...ftpDataResult['statuses'], ...ftpImageResult['statuses']],
serverName: serverName,
type: 'Installation',
baseFileName: baseFileName, // Pass the generated filename
apiRecordId: apiRecordId, // Pass the DB ID
);
// --- END: MODIFIED LOGGING ---
if (anyApiSuccess || anyFtpSuccess) {
_handleInstallationSuccessAlert(data, appSettings, isDataOnly: imageFiles.isEmpty);
}
@ -223,14 +261,28 @@ class AirSamplingService {
final activeConfig = await _serverConfigService.getActiveApiConfig();
final serverName = activeConfig?['config_name'] as String? ?? 'Default';
// --- START: MODIFIED FILENAME LOGIC ---
// Generate the unique timestamp ID and base filename FIRST.
final String timestampId = _generateTimestampId(data.collectionDate, data.collectionTime);
final String stationCode = installationData.stationID ?? 'UNKNOWN';
final String baseFileName = "${stationCode}_COLLECTION_${timestampId}";
// Assign this as the primary refID for the log
data.installationRefID = baseFileName;
// --- END: MODIFIED FILENAME LOGIC ---
bool anyApiSuccess = false;
Map<String, dynamic> apiDataResult = {};
Map<String, dynamic> apiImageResult = {};
final imageFiles = data.getImagesForUpload();
String? apiRecordId; // Will hold the DB ID
// Step 1: Attempt API Submission
try {
// Use the DB ID from the original installation
data.airManId = installationData.airManId;
apiRecordId = data.airManId?.toString(); // Store it for logging
apiDataResult = await _submissionApiService.submitPost(
moduleName: moduleName,
endpoint: 'air/manual/collection',
@ -241,7 +293,7 @@ class AirSamplingService {
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName,
endpoint: 'air/manual/collection-images',
fields: {'air_man_id': data.airManId.toString()},
fields: {'air_man_id': data.airManId.toString()}, // Use DB ID
files: imageFiles,
);
anyApiSuccess = apiImageResult['success'] == true;
@ -265,9 +317,8 @@ class AirSamplingService {
Map<String, dynamic> ftpImageResult = {'statuses': []};
bool anyFtpSuccess = false;
try {
final stationCode = installationData.stationID ?? 'UNKNOWN';
final samplingDateTime = "${data.collectionDate}_${data.collectionTime}".replaceAll(':', '-').replaceAll(' ', '_');
final baseFileName = "${stationCode}_COLLECTION_${samplingDateTime}";
// --- FILENAME LOGIC MOVED ---
// baseFileName is already generated above.
final combinedJson = jsonEncode({"installation": installationData.toDbJson(), "collection": data.toMap()});
final dataZip = await _zippingService.createDataZip(jsonDataMap: {'db.json': combinedJson}, baseFileName: baseFileName);
@ -305,7 +356,21 @@ class AirSamplingService {
finalMessage = 'Both API and FTP submissions failed and were queued.';
}
await _logAndSave(data: data, installationData: installationData, status: finalStatus, message: finalMessage, apiResults: [apiDataResult, apiImageResult], ftpStatuses: [...ftpDataResult['statuses'], ...ftpImageResult['statuses']], serverName: serverName, type: 'Collection');
// --- START: MODIFIED LOGGING ---
await _logAndSave(
data: data,
installationData: installationData,
status: finalStatus,
message: finalMessage,
apiResults: [apiDataResult, apiImageResult],
ftpStatuses: [...ftpDataResult['statuses'], ...ftpImageResult['statuses']],
serverName: serverName,
type: 'Collection',
baseFileName: baseFileName, // Pass the generated filename
apiRecordId: apiRecordId, // Pass the DB ID
);
// --- END: MODIFIED LOGGING ---
if(anyApiSuccess || anyFtpSuccess) {
_handleCollectionSuccessAlert(data, installationData, appSettings, isDataOnly: imageFiles.isEmpty);
}
@ -323,6 +388,10 @@ class AirSamplingService {
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
required String type,
// --- START: MODIFIED LOGGING ---
required String baseFileName, // Use this as the primary ID
String? apiRecordId, // The ID from the server DB (e.g., 102)
// --- END: MODIFIED LOGGING ---
}) async {
String refID;
Map<String, dynamic> formData;
@ -331,26 +400,28 @@ class AirSamplingService {
if (type == 'Installation') {
final installation = data as AirInstallationData;
installation.status = status;
refID = installation.refID!;
refID = installation.refID!; // This is now baseFileName
formData = installation.toMap();
imagePaths = _getInstallationImagePaths(installation);
await _localStorageService.saveAirSamplingRecord(_toMapForLocalSave(installation), refID, serverName: serverName);
} else {
final collection = data as AirCollectionData;
collection.status = status;
refID = collection.installationRefID!;
refID = collection.installationRefID!; // This is now baseFileName
formData = collection.toMap();
imagePaths = _getCollectionImagePaths(collection);
await _localStorageService.saveAirSamplingRecord(_toMapForLocalSave(installationData!..collectionData = collection), refID, serverName: serverName);
}
final logData = {
'submission_id': refID,
// --- START: MODIFIED LOGGING ---
'submission_id': baseFileName, // Use the timestamp-based filename as the primary ID
'module': 'air',
'type': type,
'status': status,
'message': message,
'report_id': (data.airManId ?? installationData?.airManId)?.toString(),
'report_id': apiRecordId, // Store the server DB ID (e.g., 102) here
// --- END: MODIFIED LOGGING ---
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(formData),
'image_data': jsonEncode(imagePaths),

View File

@ -339,7 +339,7 @@ class LocalStorageService {
}
// --- START: MODIFICATION (FIXED ERROR) ---
// Changed data.toDbJson() to data.toMap() to get a Map, not a String.
// This line is CORRECT. It uses data.toMap() to get a Map.
final Map<String, dynamic> jsonData = data.toMap();
// --- END: MODIFICATION (FIXED ERROR) ---

View File

@ -164,6 +164,22 @@ class MarineInSituSamplingService {
_serialManager.dispose();
}
// --- START: NEW HELPER METHOD ---
/// Builds a unique, millisecond-precision ID from the sampling [date] and [time].
///
/// The two parts are concatenated and parsed with the 'yyyy-MM-dd HH:mm'
/// pattern (the model stores time as 'HH:mm'); the resulting instant is
/// returned as epoch milliseconds. When parsing fails (e.g. a missing or
/// malformed part), the current time is used instead so that a usable ID
/// is always produced.
String _generateTimestampId(String? date, String? time) {
  final combined = "${date ?? ''} ${time ?? ''}";
  try {
    final parsed = DateFormat('yyyy-MM-dd HH:mm').parse(combined);
    return parsed.millisecondsSinceEpoch.toString();
  } catch (e) {
    // Never fail the submission over an unparseable timestamp — fall back
    // to wall-clock time and log the problem for later diagnosis.
    debugPrint("Could not parse '$combined' for timestamp ID, using current time. Error: $e");
    return DateTime.now().millisecondsSinceEpoch.toString();
  }
}
// --- END: NEW HELPER METHOD ---
Future<Map<String, dynamic>> submitInSituSample({
required InSituSamplingData data,
required List<Map<String, dynamic>>? appSettings,
@ -173,6 +189,12 @@ class MarineInSituSamplingService {
}) async {
const String moduleName = 'marine_in_situ';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Generate the unique timestamp ID and assign it immediately.
final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime);
data.reportId = timestampId; // This is the primary ID now.
// --- END: MODIFIED TO USE TIMESTAMP ID ---
final connectivityResult = await Connectivity().checkConnectivity();
bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
bool isOfflineSession = authProvider.isLoggedIn && (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false);
@ -235,6 +257,11 @@ class MarineInSituSamplingService {
String finalStatus = '';
bool isSessionKnownToBeExpired = false;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId; // Will hold the DB ID (e.g., 102) from the server
// data.reportId already contains the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
try {
// 1. Submit Form Data
apiDataResult = await _submissionApiService.submitPost(
@ -245,15 +272,20 @@ class MarineInSituSamplingService {
if (apiDataResult['success'] == true) {
anyApiSuccess = true;
data.reportId = apiDataResult['data']?['man_id']?.toString(); // Correct ID key for In-Situ
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Store the server's database ID in a separate variable.
apiRecordId = apiDataResult['data']?['man_id']?.toString(); // Correct ID key for In-Situ
// --- END: MODIFIED TO USE TIMESTAMP ID ---
if (data.reportId != null) {
if (apiRecordId != null) {
if (finalImageFiles.isNotEmpty) {
// 2. Submit Images
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName,
endpoint: 'marine/manual/images', // Correct endpoint for In-Situ images
fields: {'man_id': data.reportId!}, // Correct field key for In-Situ
// --- START: MODIFIED TO USE TIMESTAMP ID ---
fields: {'man_id': apiRecordId}, // Correct field key for In-Situ
// --- END: MODIFIED TO USE TIMESTAMP ID ---
files: finalImageFiles,
);
if (apiImageResult['success'] != true) {
@ -262,7 +294,9 @@ class MarineInSituSamplingService {
}
} else {
anyApiSuccess = false;
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a record ID.';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.';
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
// If apiDataResult['success'] is false, SubmissionApiService queued it.
@ -274,10 +308,12 @@ class MarineInSituSamplingService {
apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'};
// Manually queue API calls
await _retryService.addApiToQueue(endpoint: 'marine/manual/sample', method: 'POST', body: data.toApiFormData());
if (finalImageFiles.isNotEmpty && data.reportId != null) {
// --- START: MODIFIED TO USE TIMESTAMP ID ---
if (finalImageFiles.isNotEmpty && apiRecordId != null) {
// Also queue images if data call might have partially succeeded before expiry
await _retryService.addApiToQueue(endpoint: 'marine/manual/images', method: 'POST_MULTIPART', fields: {'man_id': data.reportId!}, files: finalImageFiles);
await _retryService.addApiToQueue(endpoint: 'marine/manual/images', method: 'POST_MULTIPART', fields: {'man_id': apiRecordId}, files: finalImageFiles);
}
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
// 3. Submit FTP Files
@ -286,7 +322,9 @@ class MarineInSituSamplingService {
if (isSessionKnownToBeExpired) {
debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks.");
// --- START: MODIFIED TO USE TIMESTAMP ID ---
final baseFileNameForQueue = _generateBaseFileName(data); // Use helper
// --- END: MODIFIED TO USE TIMESTAMP ID ---
// --- START FIX: Add ftpConfigId when queuing ---
final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
@ -377,6 +415,9 @@ class MarineInSituSamplingService {
ftpStatuses: ftpResults['statuses'],
serverName: serverName,
finalImageFiles: finalImageFiles, // Pass the map of actual files
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiRecordId: apiRecordId, // Pass the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logDirectory: logDirectory,
);
@ -388,7 +429,7 @@ class MarineInSituSamplingService {
}
// --- END FIX ---
return {'success': overallSuccess, 'message': finalMessage, 'reportId': data.reportId};
return {'success': overallSuccess, 'message': finalMessage, 'reportId': data.reportId}; // Return timestamp ID
}
@ -428,8 +469,10 @@ class MarineInSituSamplingService {
if (savedLogPath == null) {
const message = "Failed to save submission to local device storage.";
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Use empty map for finalImageFiles as saving failed
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, logDirectory: logDirectory);
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, apiRecordId: null, logDirectory: logDirectory);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
return {'success': false, 'message': message};
}
@ -445,9 +488,9 @@ class MarineInSituSamplingService {
const successMessage = "Device offline. Submission has been saved locally and queued for automatic retry when connection is restored.";
// Log final queued state to central DB
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, logDirectory: savedLogPath);
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, apiRecordId: null, logDirectory: savedLogPath);
return {'success': true, 'message': successMessage, 'reportId': null}; // No report ID yet
return {'success': true, 'message': successMessage, 'reportId': data.reportId}; // Return timestamp ID
}
// --- START: MODIFIED _generateBaseFileName ---
@ -455,14 +498,13 @@ class MarineInSituSamplingService {
String _generateBaseFileName(InSituSamplingData data) {
final stationCode = data.selectedStation?['man_station_code'] ?? 'NA';
// Check if reportId (timestamp) is available.
if (data.reportId != null && data.reportId!.isNotEmpty) {
return '${stationCode}_${data.reportId}';
} else {
// Fallback to old method if reportId is not available (e.g., offline queue)
final fileTimestamp = "${data.samplingDate}_${data.samplingTime}".replaceAll(':', '-').replaceAll(' ', '_');
return '${stationCode}_$fileTimestamp';
// We now always use data.reportId, which we set as the timestamp.
if (data.reportId == null || data.reportId!.isEmpty) {
// This is a safety fallback, but should not happen if submitData is used.
debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
}
return '${stationCode}_${data.reportId}';
}
// --- END: MODIFIED _generateBaseFileName ---
@ -542,6 +584,9 @@ class MarineInSituSamplingService {
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
required Map<String, File> finalImageFiles, // Changed to Map<String, File>
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId, // This is the server DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
String? logDirectory,
}) async {
data.submissionStatus = status;
@ -560,7 +605,10 @@ class MarineInSituSamplingService {
// Add submission metadata
logMapData['submissionStatus'] = status;
logMapData['submissionMessage'] = message;
logMapData['reportId'] = data.reportId;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// data.reportId (the timestamp) is already in the map from toMap()
logMapData['apiRecordId'] = apiRecordId; // Add the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logMapData['serverConfigName'] = serverName;
logMapData['api_status'] = jsonEncode(apiResults.where((r) => r.isNotEmpty).toList());
logMapData['ftp_status'] = jsonEncode(ftpStatuses);
@ -577,12 +625,14 @@ class MarineInSituSamplingService {
// Save to central DB log
final logData = {
'submission_id': data.reportId ?? baseFileName, // Use helper result
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'submission_id': data.reportId ?? baseFileName, // This is the timestamp ID
'module': 'marine',
'type': 'In-Situ', // Correct type
'status': status,
'message': message,
'report_id': data.reportId,
'report_id': apiRecordId, // This is the server DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(logMapData), // Log the comprehensive map with paths
'image_data': jsonEncode(finalImageFiles.values.map((f) => f.path).toList()), // List of paths for files actually submitted/zipped

View File

@ -171,6 +171,22 @@ class MarineInvestigativeSamplingService {
_serialManager.dispose();
}
// --- START: NEW HELPER METHOD ---
/// Builds a unique, millisecond-precision ID from the sampling [date] and [time].
///
/// The two parts are concatenated and parsed with the 'yyyy-MM-dd HH:mm'
/// pattern (the model stores time as 'HH:mm'); the resulting instant is
/// returned as epoch milliseconds. When parsing fails (e.g. a missing or
/// malformed part), the current time is used instead so that a usable ID
/// is always produced.
String _generateTimestampId(String? date, String? time) {
  final combined = "${date ?? ''} ${time ?? ''}";
  try {
    final parsed = DateFormat('yyyy-MM-dd HH:mm').parse(combined);
    return parsed.millisecondsSinceEpoch.toString();
  } catch (e) {
    // Never fail the submission over an unparseable timestamp — fall back
    // to wall-clock time and log the problem for later diagnosis.
    debugPrint("Could not parse '$combined' for timestamp ID, using current time. Error: $e");
    return DateTime.now().millisecondsSinceEpoch.toString();
  }
}
// --- END: NEW HELPER METHOD ---
Future<Map<String, dynamic>> submitInvestigativeSample({
required MarineInvesManualSamplingData data,
required List<Map<String, dynamic>>? appSettings,
@ -180,6 +196,12 @@ class MarineInvestigativeSamplingService {
}) async {
const String moduleName = 'marine_investigative';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Generate the unique timestamp ID and assign it immediately.
final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime);
data.reportId = timestampId; // This is the primary ID now.
// --- END: MODIFIED TO USE TIMESTAMP ID ---
final connectivityResult = await Connectivity().checkConnectivity();
bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
bool isOfflineSession = authProvider.isLoggedIn && (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false);
@ -241,6 +263,11 @@ class MarineInvestigativeSamplingService {
String finalStatus = '';
bool isSessionKnownToBeExpired = false;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId; // Will hold the DB ID (e.g., 102) from the server
// data.reportId already contains the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
try {
// 1. Submit Form Data
apiDataResult = await _submissionApiService.submitPost(
@ -251,15 +278,20 @@ class MarineInvestigativeSamplingService {
if (apiDataResult['success'] == true) {
anyApiSuccess = true;
data.reportId = apiDataResult['data']?['man_inves_id']?.toString();
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Store the server's database ID in a separate variable.
apiRecordId = apiDataResult['data']?['man_inves_id']?.toString();
// --- END: MODIFIED TO USE TIMESTAMP ID ---
if (data.reportId != null) {
if (apiRecordId != null) {
if (finalImageFiles.isNotEmpty) {
// 2. Submit Images
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName,
endpoint: 'marine-investigative/images',
fields: {'man_inves_id': data.reportId!},
// --- START: MODIFIED TO USE TIMESTAMP ID ---
fields: {'man_inves_id': apiRecordId}, // Use server's ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
files: finalImageFiles,
);
if (apiImageResult['success'] != true) {
@ -268,7 +300,9 @@ class MarineInvestigativeSamplingService {
}
} else {
anyApiSuccess = false;
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a record ID.';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.';
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
} on SessionExpiredException catch (_) {
@ -278,10 +312,12 @@ class MarineInvestigativeSamplingService {
apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'};
// Manually queue the API call since SubmissionApiService was never called or failed internally due to session
await _retryService.addApiToQueue(endpoint: 'marine-investigative/sample', method: 'POST', body: data.toApiFormData());
if (finalImageFiles.isNotEmpty && data.reportId != null) {
// --- START: MODIFIED TO USE TIMESTAMP ID ---
if (finalImageFiles.isNotEmpty && apiRecordId != null) {
// Also queue images if data call might have partially succeeded before expiry
await _retryService.addApiToQueue(endpoint: 'marine-investigative/images', method: 'POST_MULTIPART', fields: {'man_inves_id': data.reportId!}, files: finalImageFiles);
await _retryService.addApiToQueue(endpoint: 'marine-investigative/images', method: 'POST_MULTIPART', fields: {'man_inves_id': apiRecordId}, files: finalImageFiles);
}
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
// We no longer catch SocketException or TimeoutException here.
@ -291,7 +327,9 @@ class MarineInvestigativeSamplingService {
if (isSessionKnownToBeExpired) {
debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks.");
// --- START: MODIFIED TO USE TIMESTAMP ID ---
final baseFileNameForQueue = _generateBaseFileName(data);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
@ -372,6 +410,9 @@ class MarineInvestigativeSamplingService {
ftpStatuses: ftpResults['statuses'],
serverName: serverName,
finalImageFiles: finalImageFiles,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiRecordId: apiRecordId, // Pass the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logDirectory: logDirectory,
);
@ -383,7 +424,7 @@ class MarineInvestigativeSamplingService {
_handleInvestigativeSuccessAlert(data, appSettings, isDataOnly: finalImageFiles.isEmpty, isSessionExpired: isSessionKnownToBeExpired);
}
return {'success': overallSuccess, 'message': finalMessage, 'reportId': data.reportId};
return {'success': overallSuccess, 'message': finalMessage, 'reportId': data.reportId}; // Return timestamp ID
}
@ -416,7 +457,9 @@ class MarineInvestigativeSamplingService {
if (savedLogPath == null) {
const message = "Failed to save submission to local device storage.";
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, logDirectory: logDirectory);
// --- START: MODIFIED TO USE TIMESTAMP ID ---
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, apiRecordId: null, logDirectory: logDirectory);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
return {'success': false, 'message': message};
}
@ -430,9 +473,10 @@ class MarineInvestigativeSamplingService {
);
const successMessage = "Device offline. Submission has been saved locally and queued for automatic retry when connection is restored.";
return {'success': true, 'message': successMessage, 'reportId': null}; // No report ID yet
return {'success': true, 'message': successMessage, 'reportId': data.reportId}; // Return timestamp ID
}
// --- START: MODIFIED _generateBaseFileName ---
String _generateBaseFileName(MarineInvesManualSamplingData data) {
String stationCode = 'NA';
if (data.stationTypeSelection == 'Existing Manual Station') {
@ -443,16 +487,15 @@ class MarineInvestigativeSamplingService {
stationCode = data.newStationCode ?? 'NEW_NA';
}
// --- START: MODIFIED (from in-situ) ---
// Use reportId if available, otherwise fall back to timestamp
if (data.reportId != null && data.reportId!.isNotEmpty) {
// We now always use data.reportId, which we set as the timestamp.
if (data.reportId == null || data.reportId!.isEmpty) {
// This is a safety fallback, but should not happen.
debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
}
return '${stationCode}_${data.reportId}';
} else {
final fileTimestamp = "${data.samplingDate}_${data.samplingTime}".replaceAll(':', '-').replaceAll(' ', '_');
return '${stationCode}_$fileTimestamp';
}
// --- END: MODIFIED ---
}
// --- END: MODIFIED _generateBaseFileName ---
Future<Map<String, dynamic>> _generateAndUploadFtpFiles(MarineInvesManualSamplingData data, Map<String, File> imageFiles, String serverName, String moduleName) async {
@ -532,11 +575,14 @@ class MarineInvestigativeSamplingService {
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
required Map<String, File> finalImageFiles, // Use final images map
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId, // The server's DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
String? logDirectory, // Existing log directory path if updating
}) async {
data.submissionStatus = status;
data.submissionMessage = message;
final baseFileName = _generateBaseFileName(data);
final baseFileName = _generateBaseFileName(data); // This now uses the timestamp ID
Map<String, dynamic> logMapData = data.toDbJson();
final imageFileMap = data.toApiImageFiles();
@ -545,7 +591,10 @@ class MarineInvestigativeSamplingService {
});
logMapData['submissionStatus'] = status;
logMapData['submissionMessage'] = message;
logMapData['reportId'] = data.reportId;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// data.reportId (the timestamp) is already in the map from toDbJson()
logMapData['apiRecordId'] = apiRecordId; // Add the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logMapData['serverConfigName'] = serverName;
logMapData['api_status'] = jsonEncode(apiResults.where((r) => r.isNotEmpty).toList());
logMapData['ftp_status'] = jsonEncode(ftpStatuses);
@ -559,12 +608,14 @@ class MarineInvestigativeSamplingService {
}
final logData = {
'submission_id': data.reportId ?? baseFileName,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'submission_id': data.reportId ?? baseFileName, // This is the timestamp ID
'module': 'marine',
'type': 'Investigative',
'status': status,
'message': message,
'report_id': data.reportId,
'report_id': apiRecordId, // This is the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(logMapData), // Log comprehensive map
'image_data': jsonEncode(finalImageFiles.values.map((f) => f.path).toList()),

View File

@ -37,6 +37,22 @@ class MarineTarballSamplingService {
MarineTarballSamplingService(this._telegramService);
// --- START: NEW HELPER METHOD ---
/// Generates a unique timestamp ID from the sampling date and time.
///
/// Combines [date] and [time] (model format `yyyy-MM-dd` / `HH:mm`) and
/// returns the resulting instant as epoch milliseconds. When the pair cannot
/// be parsed, the current time is used instead so a usable ID is always produced.
String _generateTimestampId(String? date, String? time) {
  final String dateTimeString = "${date ?? ''} ${time ?? ''}";
  DateTime resolved;
  try {
    // Time format from model is HH:mm
    resolved = DateFormat('yyyy-MM-dd HH:mm').parse(dateTimeString);
  } catch (e) {
    // Fallback: if parsing fails, use the current time in milliseconds
    debugPrint("Could not parse '$dateTimeString' for timestamp ID, using current time. Error: $e");
    resolved = DateTime.now();
  }
  return resolved.millisecondsSinceEpoch.toString();
}
// --- END: NEW HELPER METHOD ---
Future<Map<String, dynamic>> submitTarballSample({
required TarballSamplingData data,
required List<Map<String, dynamic>>? appSettings,
@ -46,6 +62,13 @@ class MarineTarballSamplingService {
String? logDirectory, // Added for retry consistency
}) async {
const String moduleName = 'marine_tarball';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Generate the unique timestamp ID and assign it immediately.
final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime);
data.reportId = timestampId; // This is the primary ID now.
// --- END: MODIFIED TO USE TIMESTAMP ID ---
// --- START FIX: Handle nullable context ---
final authProvider = context != null ? Provider.of<AuthProvider>(context, listen: false) : null;
// Need a fallback mechanism if context is null (e.g., during retry)
@ -118,6 +141,11 @@ class MarineTarballSamplingService {
String finalStatus = '';
bool isSessionKnownToBeExpired = false;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId; // Will hold the DB ID (e.g., 102) from the server
// data.reportId already contains the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
try {
// 1. Submit Form Data
apiDataResult = await _submissionApiService.submitPost(
@ -128,15 +156,20 @@ class MarineTarballSamplingService {
if (apiDataResult['success'] == true) {
anyApiSuccess = true;
data.reportId = apiDataResult['data']?['autoid']?.toString(); // Correct ID key
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Store the server's database ID in a separate variable.
apiRecordId = apiDataResult['data']?['autoid']?.toString(); // Correct ID key
// --- END: MODIFIED TO USE TIMESTAMP ID ---
if (data.reportId != null) {
if (apiRecordId != null) {
if (finalImageFiles.isNotEmpty) {
// 2. Submit Images
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName,
endpoint: 'marine/tarball/images', // Correct endpoint
fields: {'autoid': data.reportId!}, // Correct field key
// --- START: MODIFIED TO USE TIMESTAMP ID ---
fields: {'autoid': apiRecordId}, // Correct field key
// --- END: MODIFIED TO USE TIMESTAMP ID ---
files: finalImageFiles,
);
if (apiImageResult['success'] != true) {
@ -146,7 +179,9 @@ class MarineTarballSamplingService {
// If data succeeded but no images, API part is still successful
} else {
anyApiSuccess = false;
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a record ID.';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.';
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
// If apiDataResult['success'] is false, SubmissionApiService queued it.
@ -158,10 +193,12 @@ class MarineTarballSamplingService {
apiDataResult = {'success': false, 'message': 'Session expired. API submission queued.'};
// Manually queue API calls
await _retryService.addApiToQueue(endpoint: 'marine/tarball/sample', method: 'POST', body: data.toFormData());
if (finalImageFiles.isNotEmpty && data.reportId != null) {
// --- START: MODIFIED TO USE TIMESTAMP ID ---
if (finalImageFiles.isNotEmpty && apiRecordId != null) {
// Queue images if data might have partially succeeded
await _retryService.addApiToQueue(endpoint: 'marine/tarball/images', method: 'POST_MULTIPART', fields: {'autoid': data.reportId!}, files: finalImageFiles);
await _retryService.addApiToQueue(endpoint: 'marine/tarball/images', method: 'POST_MULTIPART', fields: {'autoid': apiRecordId}, files: finalImageFiles);
}
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
// 3. Submit FTP Files
@ -170,7 +207,9 @@ class MarineTarballSamplingService {
if (isSessionKnownToBeExpired) {
debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks.");
// --- START: MODIFIED TO USE TIMESTAMP ID ---
final baseFileNameForQueue = _generateBaseFileName(data); // Use helper
// --- END: MODIFIED TO USE TIMESTAMP ID ---
// --- START FIX: Add ftpConfigId when queuing ---
final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
@ -259,6 +298,9 @@ class MarineTarballSamplingService {
ftpStatuses: ftpResults['statuses'],
serverName: serverName,
finalImageFiles: finalImageFiles,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiRecordId: apiRecordId, // Pass the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logDirectory: logDirectory, // Pass logDirectory for potential update
);
@ -267,7 +309,7 @@ class MarineTarballSamplingService {
_handleTarballSuccessAlert(data, appSettings, isDataOnly: finalImageFiles.isEmpty, isSessionExpired: isSessionKnownToBeExpired);
}
return {'success': overallSuccess, 'message': finalMessage, 'reportId': data.reportId};
return {'success': overallSuccess, 'message': finalMessage, 'reportId': data.reportId}; // Return timestamp ID
}
Future<Map<String, dynamic>> _performOfflineQueuing({
@ -296,8 +338,10 @@ class MarineTarballSamplingService {
if (savedLogPath == null) {
const message = "Failed to save submission to local device storage.";
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Log failure state if saving fails
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, logDirectory: logDirectory);
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, apiRecordId: null, logDirectory: logDirectory);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
return {'success': false, 'message': message};
}
@ -313,17 +357,25 @@ class MarineTarballSamplingService {
const successMessage = "Device offline. Submission has been saved locally and queued for automatic retry when connection is restored.";
// Log final queued state to central DB
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, logDirectory: savedLogPath);
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, finalImageFiles: {}, apiRecordId: null, logDirectory: savedLogPath);
return {'success': true, 'message': successMessage, 'reportId': null}; // No report ID yet
return {'success': true, 'message': successMessage, 'reportId': data.reportId}; // Return timestamp ID
}
// --- START: MODIFIED _generateBaseFileName ---
/// Helper to generate the base filename for ZIP files.
///
/// Returns `<stationCode>_<timestampId>`. The timestamp ID is expected to be
/// set on [data.reportId] by the submit flow; if it is missing we fall back
/// to the current epoch milliseconds so a unique name is still produced.
String _generateBaseFileName(TarballSamplingData data) {
  final stationCode = data.selectedStation?['tbl_station_code'] ?? 'NA';
  // We now always use data.reportId, which we set as the timestamp.
  // (Removed the stale date/time-based filename computation that made the
  // reportId path below unreachable.)
  if (data.reportId == null || data.reportId!.isEmpty) {
    // This is a safety fallback, but should not happen if submitData is used.
    debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
    return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
  }
  return '${stationCode}_${data.reportId}';
}
// --- END: MODIFIED _generateBaseFileName ---
/// Generates data and image ZIP files and uploads them using SubmissionFtpService.
Future<Map<String, dynamic>> _generateAndUploadFtpFiles(TarballSamplingData data, Map<String, File> imageFiles, String serverName, String moduleName) async {
@ -334,7 +386,9 @@ class MarineTarballSamplingService {
module: 'marine',
subModule: 'marine_tarball_sampling', // Correct sub-module
);
final folderName = data.reportId ?? baseFileName;
// --- START: MODIFIED folderName ---
final folderName = baseFileName; // Use the timestamp-based filename
// --- END: MODIFIED folderName ---
final Directory? localSubmissionDir = logDirectory != null ? Directory(p.join(logDirectory.path, folderName)) : null;
if (localSubmissionDir != null && !await localSubmissionDir.exists()) {
@ -393,6 +447,9 @@ class MarineTarballSamplingService {
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
required Map<String, File> finalImageFiles,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId, // This is the server DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
String? logDirectory, // Added for potential update
}) async {
data.submissionStatus = status;
@ -408,7 +465,10 @@ class MarineTarballSamplingService {
// Add submission metadata
logMapData['submissionStatus'] = status;
logMapData['submissionMessage'] = message;
logMapData['reportId'] = data.reportId;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// data.reportId (the timestamp) is already in the map from toDbJson()
logMapData['apiRecordId'] = apiRecordId; // Add the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logMapData['serverConfigName'] = serverName;
logMapData['api_status'] = jsonEncode(apiResults.where((r) => r.isNotEmpty).toList());
logMapData['ftp_status'] = jsonEncode(ftpStatuses);
@ -423,12 +483,14 @@ class MarineTarballSamplingService {
// Save a record to the central SQLite submission log table
final logData = {
'submission_id': data.reportId ?? baseFileName, // Use helper result
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'submission_id': data.reportId ?? baseFileName, // This is the timestamp ID
'module': 'marine', // Correct module
'type': 'Tarball', // Correct type
'status': status,
'message': message,
'report_id': data.reportId,
'report_id': apiRecordId, // This is the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(logMapData), // Log the comprehensive map with paths
'image_data': jsonEncode(finalImageFiles.values.map((f) => f.path).toList()),

View File

@ -163,6 +163,22 @@ class RiverInSituSamplingService {
_serialManager.dispose();
}
// --- START: NEW HELPER METHOD ---
/// Generates a unique timestamp ID from the sampling date and time.
///
/// Parses "[date] [time]" (model supplies `yyyy-MM-dd` and `HH:mm`) and
/// returns the epoch-millisecond value as a string; the current time is the
/// fallback whenever parsing fails.
String _generateTimestampId(String? date, String? time) {
  final combined = "${date ?? ''} ${time ?? ''}";
  try {
    // Time format from model is HH:mm
    return DateFormat('yyyy-MM-dd HH:mm')
        .parse(combined)
        .millisecondsSinceEpoch
        .toString();
  } catch (e) {
    // Fallback: if parsing fails, use the current time in milliseconds
    debugPrint("Could not parse '$combined' for timestamp ID, using current time. Error: $e");
    return DateTime.now().millisecondsSinceEpoch.toString();
  }
}
// --- END: NEW HELPER METHOD ---
Future<Map<String, dynamic>> submitData({
required RiverInSituSamplingData data,
required List<Map<String, dynamic>>? appSettings,
@ -171,6 +187,13 @@ class RiverInSituSamplingService {
}) async {
const String moduleName = 'river_in_situ';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Generate the unique timestamp ID and assign it immediately.
// This ID will be used for filenames and as the primary submission ID.
final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime);
data.reportId = timestampId; // This is the primary ID now.
// --- END: MODIFIED TO USE TIMESTAMP ID ---
final connectivityResult = await Connectivity().checkConnectivity();
bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
bool isOfflineSession = authProvider.isLoggedIn && (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false);
@ -232,6 +255,11 @@ class RiverInSituSamplingService {
String finalStatus = '';
bool isSessionKnownToBeExpired = false;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId; // Will hold the DB ID (e.g., 102) from the server
// data.reportId already contains the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
try {
// 1. Submit Form Data
apiDataResult = await _submissionApiService.submitPost(
@ -242,15 +270,21 @@ class RiverInSituSamplingService {
if (apiDataResult['success'] == true) {
anyApiSuccess = true;
data.reportId = apiDataResult['data']?['r_man_id']?.toString(); // Correct ID key
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Store the server's database ID in a separate variable.
// data.reportId (the timestamp) REMAINS UNCHANGED.
apiRecordId = apiDataResult['data']?['r_man_id']?.toString(); // Correct ID key
// --- END: MODIFIED TO USE TIMESTAMP ID ---
if (data.reportId != null) {
if (apiRecordId != null) { // Check if server returned an ID
if (finalImageFiles.isNotEmpty) {
// 2. Submit Images
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName,
endpoint: 'river/manual/images', // Correct endpoint
fields: {'r_man_id': data.reportId!}, // Correct field key
// --- START: MODIFIED TO USE TIMESTAMP ID ---
fields: {'r_man_id': apiRecordId}, // Use server's ID for relation
// --- END: MODIFIED TO USE TIMESTAMP ID ---
files: finalImageFiles,
);
if (apiImageResult['success'] != true) {
@ -259,7 +293,9 @@ class RiverInSituSamplingService {
}
} else {
anyApiSuccess = false;
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a record ID.';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.';
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
// If apiDataResult['success'] is false, SubmissionApiService queued it.
@ -271,9 +307,11 @@ class RiverInSituSamplingService {
apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'};
// Manually queue API calls
await _retryService.addApiToQueue(endpoint: 'river/manual/sample', method: 'POST', body: data.toApiFormData());
if (finalImageFiles.isNotEmpty && data.reportId != null) {
if (finalImageFiles.isNotEmpty && apiRecordId != null) {
// Also queue images if data call might have partially succeeded before expiry
await _retryService.addApiToQueue(endpoint: 'river/manual/images', method: 'POST_MULTIPART', fields: {'r_man_id': data.reportId!}, files: finalImageFiles);
// --- START: MODIFIED TO USE TIMESTAMP ID ---
await _retryService.addApiToQueue(endpoint: 'river/manual/images', method: 'POST_MULTIPART', fields: {'r_man_id': apiRecordId}, files: finalImageFiles);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
@ -283,7 +321,10 @@ class RiverInSituSamplingService {
if (isSessionKnownToBeExpired) {
debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks.");
final baseFileNameForQueue = _generateBaseFileName(data); // Use helper
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// We can now safely call _generateBaseFileName, as data.reportId is the timestamp
final baseFileNameForQueue = _generateBaseFileName(data);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
// --- START FIX: Add ftpConfigId when queuing ---
// Get all potential FTP configs
@ -371,6 +412,9 @@ class RiverInSituSamplingService {
apiResults: [apiDataResult, apiImageResult],
ftpStatuses: ftpResults['statuses'],
serverName: serverName,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiRecordId: apiRecordId, // Pass the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logDirectory: logDirectory,
);
@ -387,7 +431,9 @@ class RiverInSituSamplingService {
'status': finalStatus,
'success': overallSuccess,
'message': finalMessage,
'reportId': data.reportId
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'reportId': data.reportId // This is now the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
};
}
@ -417,7 +463,9 @@ class RiverInSituSamplingService {
if (savedLogPath == null) {
const message = "Failed to save submission to local device storage.";
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: logDirectory);
// --- START: MODIFIED TO USE TIMESTAMP ID ---
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: logDirectory);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
return {'status': 'Error', 'success': false, 'message': message};
}
@ -432,9 +480,9 @@ class RiverInSituSamplingService {
const successMessage = "Device offline. Submission has been saved locally and queued for automatic retry when connection is restored.";
// Log final queued state to central DB
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: savedLogPath);
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: savedLogPath);
return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': null};
return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': data.reportId}; // Return timestamp ID
}
// --- START: MODIFIED _generateBaseFileName ---
@ -442,14 +490,13 @@ class RiverInSituSamplingService {
/// Helper to generate the base filename for ZIP files.
///
/// Returns `<stationCode>_<timestampId>`. The submit flow assigns the
/// timestamp ID to [data.reportId] up front, so the fallback branch should
/// only trigger when this helper is called outside that flow.
String _generateBaseFileName(RiverInSituSamplingData data) {
  final stationCode = data.selectedStation?['sampling_station_code'] ?? 'UNKNOWN';
  // We now always use data.reportId, which we set as the timestamp.
  // (Removed the stale early-return path that built the name from the raw
  // date/time string and made the reportId logic unreachable.)
  if (data.reportId == null || data.reportId!.isEmpty) {
    // This is a safety fallback, but should not happen if submitData is used.
    debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
    return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
  }
  return '${stationCode}_${data.reportId}';
}
// --- END: MODIFIED _generateBaseFileName ---
@ -514,18 +561,25 @@ class RiverInSituSamplingService {
required List<Map<String, dynamic>> apiResults,
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId, // This is the server DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
String? logDirectory,
}) async {
data.submissionStatus = status;
data.submissionMessage = message;
final baseFileName = _generateBaseFileName(data); // Use helper
final baseFileName = _generateBaseFileName(data); // This now uses the timestamp ID
// Prepare log data map using toMap()
final Map<String, dynamic> logMapData = data.toMap();
// Add submission metadata
logMapData['submissionStatus'] = status;
logMapData['submissionMessage'] = message;
logMapData['reportId'] = data.reportId;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// data.reportId (which is the timestamp) is already in the map from toMap()
// We add the server's DB ID separately for the log file
logMapData['apiRecordId'] = apiRecordId; // (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logMapData['serverConfigName'] = serverName;
logMapData['api_status'] = jsonEncode(apiResults.where((r) => r.isNotEmpty).toList());
logMapData['ftp_status'] = jsonEncode(ftpStatuses);
@ -542,12 +596,14 @@ class RiverInSituSamplingService {
// Save to central DB log
final imagePaths = data.toApiImageFiles().values.whereType<File>().map((f) => f.path).toList();
final centralLogData = {
'submission_id': data.reportId ?? baseFileName, // Use helper result
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'submission_id': data.reportId ?? baseFileName, // This is the timestamp ID
'module': 'river',
'type': data.samplingType ?? 'In-Situ', // Correct type
'status': status,
'message': message,
'report_id': data.reportId,
'report_id': apiRecordId, // This is the server DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(logMapData), // Log the comprehensive map
'image_data': jsonEncode(imagePaths),

View File

@ -176,6 +176,22 @@ class RiverInvestigativeSamplingService { // Renamed class
_serialManager.dispose();
}
// --- START: NEW HELPER METHOD ---
/// Generates a unique timestamp ID from the sampling date and time.
///
/// Builds "[date] [time]" and converts it to epoch milliseconds using the
/// model's `yyyy-MM-dd HH:mm` format; falls back to the current time when
/// the input cannot be parsed.
String _generateTimestampId(String? date, String? time) {
  final String stamp = "${date ?? ''} ${time ?? ''}";
  final formatter = DateFormat('yyyy-MM-dd HH:mm'); // Time format from model is HH:mm
  try {
    final parsed = formatter.parse(stamp);
    return parsed.millisecondsSinceEpoch.toString();
  } catch (e) {
    // Fallback: if parsing fails, use the current time in milliseconds
    debugPrint("Could not parse '$stamp' for timestamp ID, using current time. Error: $e");
    return DateTime.now().millisecondsSinceEpoch.toString();
  }
}
// --- END: NEW HELPER METHOD ---
// Adapted Submission Logic for Investigative
Future<Map<String, dynamic>> submitData({
required RiverInvesManualSamplingData data, // Updated model type
@ -186,6 +202,12 @@ class RiverInvestigativeSamplingService { // Renamed class
// *** MODIFIED: Module name changed ***
const String moduleName = 'river_investigative';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Generate the unique timestamp ID and assign it immediately.
final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime);
data.reportId = timestampId; // This is the primary ID now.
// --- END: MODIFIED TO USE TIMESTAMP ID ---
final connectivityResult = await Connectivity().checkConnectivity();
bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
bool isOfflineSession = authProvider.isLoggedIn && (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false);
@ -246,6 +268,11 @@ class RiverInvestigativeSamplingService { // Renamed class
String finalStatus = '';
bool isSessionKnownToBeExpired = false;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId; // Will hold the DB ID (e.g., 102) from the server
// data.reportId already contains the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
try {
// 1. Submit Form Data (using Investigative endpoint and data)
apiDataResult = await _submissionApiService.submitPost(
@ -257,18 +284,22 @@ class RiverInvestigativeSamplingService { // Renamed class
if (apiDataResult['success'] == true) {
anyApiSuccess = true;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// *** MODIFIED: Extract report ID using assumed key ***
data.reportId = apiDataResult['data']?['r_inv_id']?.toString(); // Assumed key for investigative ID
apiRecordId = apiDataResult['data']?['r_inv_id']?.toString(); // Assumed key for investigative ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
if (data.reportId != null) {
if (apiRecordId != null) {
if (finalImageFiles.isNotEmpty) {
// 2. Submit Images (using Investigative endpoint)
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName, // 'river_investigative'
// *** MODIFIED: API Endpoint ***
endpoint: 'river/investigative/images', // Assumed endpoint for investigative images
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// *** MODIFIED: Field key for ID ***
fields: {'r_inv_id': data.reportId!}, // Use assumed investigative ID key
fields: {'r_inv_id': apiRecordId}, // Use assumed investigative ID key
// --- END: MODIFIED TO USE TIMESTAMP ID ---
files: finalImageFiles,
);
if (apiImageResult['success'] != true) {
@ -281,7 +312,9 @@ class RiverInvestigativeSamplingService { // Renamed class
// API succeeded but didn't return an ID - treat as failure
anyApiSuccess = false;
apiDataResult['success'] = false; // Mark as failed
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a record ID.';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.';
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
// If apiDataResult['success'] is false initially, SubmissionApiService queued it.
@ -294,10 +327,12 @@ class RiverInvestigativeSamplingService { // Renamed class
// Manually queue API calls
// *** MODIFIED: Use Investigative endpoints for queueing ***
await _retryService.addApiToQueue(endpoint: 'river/investigative/sample', method: 'POST', body: data.toApiFormData());
if (finalImageFiles.isNotEmpty && data.reportId != null) {
// --- START: MODIFIED TO USE TIMESTAMP ID ---
if (finalImageFiles.isNotEmpty && apiRecordId != null) {
// Queue images only if we might have gotten an ID before expiry
await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {'r_inv_id': data.reportId!}, files: finalImageFiles);
} else if (finalImageFiles.isNotEmpty && data.reportId == null) {
await _retryService.addApiToQueue(endpoint: 'river/investigative/images', method: 'POST_MULTIPART', fields: {'r_inv_id': apiRecordId}, files: finalImageFiles);
} else if (finalImageFiles.isNotEmpty && apiRecordId == null) {
// --- END: MODIFIED TO USE TIMESTAMP ID ---
// If data call failed before getting ID, queue images without ID - might need manual linking later or separate retry logic
debugPrint("Queueing investigative images without report ID due to session expiry during data submission.");
// How to handle this depends on backend capabilities or manual intervention needs.
@ -312,7 +347,9 @@ class RiverInvestigativeSamplingService { // Renamed class
if (isSessionKnownToBeExpired) {
debugPrint("Skipping FTP attempt for River Investigative due to known expired session. Manually queuing FTP tasks."); // Log context update
// --- START: MODIFIED TO USE TIMESTAMP ID ---
final baseFileNameForQueue = _generateBaseFileName(data); // Use helper
// --- END: MODIFIED TO USE TIMESTAMP ID ---
// --- START FIX: Add ftpConfigId when queuing --- (Copied from In-Situ, ensure DB structure matches)
final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
@ -402,6 +439,9 @@ class RiverInvestigativeSamplingService { // Renamed class
apiResults: [apiDataResult, apiImageResult].where((r) => r.isNotEmpty).toList(), // Filter out empty results
ftpStatuses: ftpResults['statuses'] ?? [],
serverName: serverName,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiRecordId: apiRecordId, // Pass the server ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logDirectory: logDirectory,
);
@ -415,7 +455,9 @@ class RiverInvestigativeSamplingService { // Renamed class
'status': finalStatus,
'success': overallSuccess, // Reflects if *any* part succeeded now
'message': finalMessage,
'reportId': data.reportId // May be null if API failed
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'reportId': data.reportId // This is now the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
};
}
@ -449,7 +491,9 @@ class RiverInvestigativeSamplingService { // Renamed class
// If saving the log itself failed
const message = "Failed to save River Investigative submission to local device storage."; // Log context update
// Log failure to central DB log if possible
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: logDirectory);
// --- START: MODIFIED TO USE TIMESTAMP ID ---
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: logDirectory);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
return {'status': 'Error', 'success': false, 'message': message};
}
@ -469,174 +513,14 @@ class RiverInvestigativeSamplingService { // Renamed class
data.submissionStatus = 'L1'; // Final queued status
data.submissionMessage = successMessage;
// Log final queued state to central DB log
await _logAndSave(data: data, status: 'L1', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: savedLogPath); // Ensure log reflects final state
// --- START: MODIFIED TO USE TIMESTAMP ID ---
await _logAndSave(data: data, status: 'L1', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: savedLogPath); // Ensure log reflects final state
// --- END: MODIFIED TO USE TIMESTAMP ID ---
return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': null};
}
/// Helper to generate the base filename for ZIP files (Investigative).
///
/// Returns `<stationCode>_<timestampId>`, matching the tarball and in-situ
/// services. submitData assigns the timestamp ID to [data.reportId] before
/// any file is generated, so the fallback should not normally be hit.
String _generateBaseFileName(RiverInvesManualSamplingData data) { // Updated model type
  // Use the determined station code helper
  final stationCode = data.getDeterminedStationCode() ?? 'UNKNOWN';
  // We now always use data.reportId, which we set as the timestamp —
  // consistent with the other sampling services in this commit.
  if (data.reportId == null || data.reportId!.isEmpty) {
    // This is a safety fallback, but should not happen if submitData is used.
    debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
    return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
  }
  return '${stationCode}_${data.reportId}'; // Consistent format
}
/// Generates data and image ZIP files and uploads them using SubmissionFtpService (Investigative).
///
/// Creates a data ZIP (db.json plus the investigative basic-form, reading and
/// manual-info JSON files) and, when [imageFiles] is non-empty, an image ZIP.
/// Both are written into the local submission log folder and uploaded over
/// FTP. Returns a map whose 'statuses' list combines both upload results.
Future<Map<String, dynamic>> _generateAndUploadFtpFiles(RiverInvesManualSamplingData data, Map<String, File> imageFiles, String serverName, String moduleName) async { // Updated model type
  final baseFileName = _generateBaseFileName(data); // Use helper
  // *** MODIFIED: Use correct base dir getter ***
  final Directory? logDirectory = await _localStorageService.getRiverInvestigativeBaseDir(serverName: serverName); // NEW GETTER
  // Determine the specific folder for this submission log within the base directory.
  // Use the generated base filename (station code + timestamp ID) so the local
  // folder name stays consistent with the tarball/in-situ services.
  final folderName = baseFileName;
  final Directory? localSubmissionDir = logDirectory != null ? Directory(p.join(logDirectory.path, folderName)) : null;
  if (localSubmissionDir != null && !await localSubmissionDir.exists()) {
    await localSubmissionDir.create(recursive: true); // Create if doesn't exist
  }
  // Create and upload data ZIP (with multiple JSON files specific to River Investigative)
  final dataZip = await _zippingService.createDataZip(
    jsonDataMap: {
      // *** MODIFIED: Use Investigative model's JSON methods and filenames ***
      'db.json': jsonEncode(data.toDbJson()), // Main data structure
      'river_inves_basic_form.json': data.toBasicFormJson(),
      'river_inves_reading.json': data.toReadingJson(),
      'river_inves_manual_info.json': data.toManualInfoJson(),
    },
    baseFileName: baseFileName,
    destinationDir: localSubmissionDir, // Save ZIP in the specific log folder
  );
  Map<String, dynamic> ftpDataResult = {'success': true, 'statuses': []}; // Default success if no file
  if (dataZip != null) {
    ftpDataResult = await _submissionFtpService.submit(
      moduleName: moduleName, // 'river_investigative'
      fileToUpload: dataZip,
      remotePath: '/${p.basename(dataZip.path)}' // Standard remote path
    );
  }
  // Create and upload image ZIP (if images exist)
  Map<String, dynamic> ftpImageResult = {'success': true, 'statuses': []}; // Default success if no images
  if (imageFiles.isNotEmpty) {
    final imageZip = await _zippingService.createImageZip(
      imageFiles: imageFiles.values.toList(),
      baseFileName: baseFileName,
      destinationDir: localSubmissionDir, // Save ZIP in the specific log folder
    );
    if (imageZip != null) {
      ftpImageResult = await _submissionFtpService.submit(
        moduleName: moduleName, // 'river_investigative'
        fileToUpload: imageZip,
        remotePath: '/${p.basename(imageZip.path)}' // Standard remote path
      );
    }
  }
  // Combine statuses from both uploads
  return {
    'statuses': <Map<String, dynamic>>[
      ...(ftpDataResult['statuses'] as List? ?? []), // Use null-aware spread
      ...(ftpImageResult['statuses'] as List? ?? []), // Use null-aware spread
    ],
  };
}
/// Saves or updates the local log file and saves a record to the central DB log (Investigative).
Future<void> _logAndSave({
required RiverInvesManualSamplingData data, // Updated model type
required String status,
required String message,
required List<Map<String, dynamic>> apiResults,
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
String? logDirectory, // Can be null initially, gets populated on first save
}) async {
// Persists the submission outcome twice:
//   1. a per-submission local log file (created or updated in place), and
//   2. a row in the central submission_log DB table.
// Mutates [data]'s submissionStatus/submissionMessage as a side effect.
data.submissionStatus = status;
data.submissionMessage = message;
final baseFileName = _generateBaseFileName(data); // Use helper for consistent naming
// Prepare log data map using toMap()
final Map<String, dynamic> logMapData = data.toMap();
// Add submission metadata that might not be in toMap() or needs overriding
logMapData['submissionStatus'] = status;
logMapData['submissionMessage'] = message;
logMapData['reportId'] = data.reportId;
logMapData['serverConfigName'] = serverName;
// Store API/FTP results as JSON strings
logMapData['api_status'] = jsonEncode(apiResults); // Ensure apiResults is a list
logMapData['ftp_status'] = jsonEncode(ftpStatuses); // Ensure ftpStatuses is a list
String? savedLogPath = logDirectory;
// Save or Update local log file (data.json)
if (savedLogPath != null && savedLogPath.isNotEmpty) {
// Update existing log
logMapData['logDirectory'] = savedLogPath; // Ensure logDirectory path is in the map for update method
// *** MODIFIED: Use correct update method ***
await _localStorageService.updateRiverInvestigativeLog(logMapData); // NEW UPDATE METHOD
} else {
// Save new log and get the path
// *** MODIFIED: Use correct save method ***
savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data, serverName: serverName); // NEW SAVE METHOD
if (savedLogPath != null) {
logMapData['logDirectory'] = savedLogPath; // Add the new path for central log
} else {
// Local save failed: the central record below will lack a logDirectory.
debugPrint("Failed to save River Investigative log locally, central DB log might be incomplete.");
// Handle case where local save failed? Maybe skip central log or log with error?
}
}
// Save record to central DB log (submission_log table)
// Only paths of images that actually exist as File objects are recorded.
final imagePaths = data.toApiImageFiles().values.whereType<File>().map((f) => f.path).toList();
final centralLogData = {
'submission_id': data.reportId ?? baseFileName, // Use report ID or generated name as unique ID
// *** MODIFIED: Module and Type ***
'module': 'river', // Keep main module as 'river'
'type': 'Investigative', // Specific type
'status': status,
'message': message,
'report_id': data.reportId,
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(logMapData), // Log the comprehensive map including paths and status
'image_data': jsonEncode(imagePaths), // Log original image paths used for submission attempt
'server_name': serverName,
'api_status': jsonEncode(apiResults), // Log API results
'ftp_status': jsonEncode(ftpStatuses), // Log FTP results
};
// Central-log failures are swallowed on purpose: logging must never
// break the submission flow itself.
try {
await _dbHelper.saveSubmissionLog(centralLogData);
} catch (e) {
debugPrint("Error saving River Investigative submission log to DB: $e"); // Log context update
}
}
/// Handles sending or queuing the Telegram alert for River Investigative submissions.
Future<void> _handleSuccessAlert(RiverInvesManualSamplingData data, List<Map<String, dynamic>>? appSettings, {required bool isDataOnly, bool isSessionExpired = false}) async { // Updated model type
// Sends (or queues) the module's Telegram success alert. Best-effort:
// any failure is logged and swallowed so alerting can never break the
// submission flow. [isDataOnly] only affects the alert wording.
try {
final message = await _generateInvestigativeAlertMessage(data, isDataOnly: isDataOnly); // Call specific helper
// *** MODIFIED: Telegram key ***
final alertKey = 'river_investigative'; // Specific key for this module
if (isSessionExpired) {
// An expired session cannot send immediately — queue straight away.
debugPrint("Session is expired; queuing River Investigative Telegram alert directly for $alertKey."); // Log context update
await _telegramService.queueMessage(alertKey, message, appSettings);
} else {
final bool wasSent = await _telegramService.sendAlertImmediately(alertKey, message, appSettings);
if (!wasSent) {
// Fallback to queueing if immediate send fails
await _telegramService.queueMessage(alertKey, message, appSettings);
}
}
} catch (e) {
debugPrint("Failed to handle River Investigative Telegram alert: $e"); // Log context update
}
// BUG FIX: removed a stray `return {'status': 'Queued', ...}` statement
// that followed the try/catch. It returned a Map from this Future<void>
// function and referenced `successMessage`, which is not in scope here
// (it belongs to the submitData flow), so it could not compile.
}
// --- START: NEW HELPER METHOD (for timestamp ID) ---
/// Generates the specific Telegram alert message content for River Investigative.
Future<String> _generateInvestigativeAlertMessage(RiverInvesManualSamplingData data, {required bool isDataOnly}) async { // Updated model type
final submissionType = isDataOnly ? "(Data Only)" : "(Data & Images)";
@ -692,6 +576,188 @@ class RiverInvestigativeSamplingService { // Renamed class
return buffer.toString();
}
// --- END: NEW HELPER METHOD ---
// --- START: MODIFIED _generateBaseFileName ---
/// Helper to generate the base filename for ZIP files (Investigative).
String _generateBaseFileName(RiverInvesManualSamplingData data) { // Updated model type
// Station code resolved by the model; sentinel keeps the name well-formed
// when no station was determined.
final stationCode = data.getDeterminedStationCode() ?? 'UNKNOWN';
// data.reportId is expected to already hold the timestamp ID assigned in
// submitData(); copy to a local so null-checking promotes it (avoids `!`).
final reportId = data.reportId;
if (reportId == null || reportId.isEmpty) {
// This is a safety fallback, but should not happen if submitData is used.
debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
// IDIOM FIX: `.toString()` inside an interpolation is redundant.
return '${stationCode}_${DateTime.now().millisecondsSinceEpoch}';
}
return "${stationCode}_$reportId"; // Consistent format, e.g. "RS01_1731897600000"
}
// --- END: MODIFIED _generateBaseFileName ---
/// Generates data and image ZIP files and uploads them using SubmissionFtpService (Investigative).
Future<Map<String, dynamic>> _generateAndUploadFtpFiles(RiverInvesManualSamplingData data, Map<String, File> imageFiles, String serverName, String moduleName) async { // Updated model type
// Timestamp-ID variant: the log folder and both ZIP names derive from the
// reportId-based base filename. Builds the data ZIP (always) and the image
// ZIP (only when images exist), saves them locally, uploads each via
// SubmissionFtpService, and returns {'statuses': [...]} combining results.
final baseFileName = _generateBaseFileName(data); // Use helper
// *** MODIFIED: Use correct base dir getter ***
final Directory? logDirectory = await _localStorageService.getRiverInvestigativeBaseDir(serverName: serverName); // NEW GETTER
// Determine the specific folder for this submission log within the base directory
// --- START: MODIFIED folderName ---
final folderName = baseFileName; // Use the timestamp-based filename
// --- END: MODIFIED folderName ---
final Directory? localSubmissionDir = logDirectory != null ? Directory(p.join(logDirectory.path, folderName)) : null;
if (localSubmissionDir != null && !await localSubmissionDir.exists()) {
await localSubmissionDir.create(recursive: true); // Create if doesn't exist
}
// Create and upload data ZIP (with multiple JSON files specific to River Investigative)
final dataZip = await _zippingService.createDataZip(
jsonDataMap: {
// *** MODIFIED: Use Investigative model's JSON methods and filenames ***
// NOTE(review): toDbJson() is passed without jsonEncode here (the older
// revision wrapped it). This assumes toDbJson() already returns a JSON
// *string*, not a Map — confirm, since createDataZip encodes string
// content into the archive. The sibling to*Json() calls suggest strings.
'db.json': data.toDbJson(), // Main data structure
'river_inves_basic_form.json': data.toBasicFormJson(),
'river_inves_reading.json': data.toReadingJson(),
'river_inves_manual_info.json': data.toManualInfoJson(),
},
baseFileName: baseFileName,
destinationDir: localSubmissionDir, // Save ZIP in the specific log folder
);
// When createDataZip yields null there is nothing to upload; treat as success.
Map<String, dynamic> ftpDataResult = {'success': true, 'statuses': []}; // Default success if no file
if (dataZip != null) {
ftpDataResult = await _submissionFtpService.submit(
moduleName: moduleName, // 'river_investigative'
fileToUpload: dataZip,
remotePath: '/${p.basename(dataZip.path)}' // Standard remote path
);
}
// Create and upload image ZIP (if images exist)
Map<String, dynamic> ftpImageResult = {'success': true, 'statuses': []}; // Default success if no images
if (imageFiles.isNotEmpty) {
final imageZip = await _zippingService.createImageZip(
imageFiles: imageFiles.values.toList(),
baseFileName: baseFileName,
destinationDir: localSubmissionDir, // Save ZIP in the specific log folder
);
if (imageZip != null) {
ftpImageResult = await _submissionFtpService.submit(
moduleName: moduleName, // 'river_investigative'
fileToUpload: imageZip,
remotePath: '/${p.basename(imageZip.path)}' // Standard remote path
);
}
}
// Combine statuses from both uploads
return {
'statuses': <Map<String, dynamic>>[
...(ftpDataResult['statuses'] as List? ?? []), // Use null-aware spread
...(ftpImageResult['statuses'] as List? ?? []), // Use null-aware spread
],
};
}
/// Saves or updates the local log file and saves a record to the central DB log (Investigative).
Future<void> _logAndSave({
required RiverInvesManualSamplingData data, // Updated model type
required String status,
required String message,
required List<Map<String, dynamic>> apiResults,
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId, // The server's DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
String? logDirectory, // Can be null initially, gets populated on first save
}) async {
// Timestamp-ID variant: data.reportId is the client-generated timestamp ID
// (primary submission ID); [apiRecordId] is the server-assigned DB row ID.
// Persists the outcome to the per-submission local log file and to the
// central submission_log DB table. Mutates [data]'s status/message fields.
data.submissionStatus = status;
data.submissionMessage = message;
final baseFileName = _generateBaseFileName(data); // Use helper for consistent naming
// Prepare log data map using toMap()
final Map<String, dynamic> logMapData = data.toMap();
// Add submission metadata that might not be in toMap() or needs overriding
logMapData['submissionStatus'] = status;
logMapData['submissionMessage'] = message;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// data.reportId (the timestamp) is already in the map from toMap()
logMapData['apiRecordId'] = apiRecordId; // Add the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logMapData['serverConfigName'] = serverName;
// Store API/FTP results as JSON strings
logMapData['api_status'] = jsonEncode(apiResults); // Ensure apiResults is a list
logMapData['ftp_status'] = jsonEncode(ftpStatuses); // Ensure ftpStatuses is a list
String? savedLogPath = logDirectory;
// Save or Update local log file (data.json)
if (savedLogPath != null && savedLogPath.isNotEmpty) {
// Update existing log
logMapData['logDirectory'] = savedLogPath; // Ensure logDirectory path is in the map for update method
// *** MODIFIED: Use correct update method ***
await _localStorageService.updateRiverInvestigativeLog(logMapData); // NEW UPDATE METHOD
} else {
// Save new log and get the path
// *** MODIFIED: Use correct save method ***
savedLogPath = await _localStorageService.saveRiverInvestigativeSamplingData(data, serverName: serverName); // NEW SAVE METHOD
if (savedLogPath != null) {
logMapData['logDirectory'] = savedLogPath; // Add the new path for central log
} else {
// Local save failed: the central record below will lack a logDirectory.
debugPrint("Failed to save River Investigative log locally, central DB log might be incomplete.");
// Handle case where local save failed? Maybe skip central log or log with error?
}
}
// Save record to central DB log (submission_log table)
// Only paths of images that actually exist as File objects are recorded.
final imagePaths = data.toApiImageFiles().values.whereType<File>().map((f) => f.path).toList();
final centralLogData = {
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'submission_id': data.reportId ?? baseFileName, // Use timestamp ID
// *** MODIFIED: Module and Type ***
'module': 'river', // Keep main module as 'river'
'type': 'Investigative', // Specific type
'status': status,
'message': message,
'report_id': apiRecordId, // Use server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(logMapData), // Log the comprehensive map including paths and status
'image_data': jsonEncode(imagePaths), // Log original image paths used for submission attempt
'server_name': serverName,
'api_status': jsonEncode(apiResults), // Log API results
'ftp_status': jsonEncode(ftpStatuses), // Log FTP results
};
// Central-log failures are swallowed on purpose: logging must never
// break the submission flow itself.
try {
await _dbHelper.saveSubmissionLog(centralLogData);
} catch (e) {
debugPrint("Error saving River Investigative submission log to DB: $e"); // Log context update
}
}
/// Handles sending or queuing the Telegram alert for River Investigative submissions.
Future<void> _handleSuccessAlert(RiverInvesManualSamplingData data, List<Map<String, dynamic>>? appSettings, {required bool isDataOnly, bool isSessionExpired = false}) async {
  // Best-effort alert delivery: every failure is logged and swallowed so
  // alerting can never break the submission flow itself.
  try {
    final message = await _generateInvestigativeAlertMessage(data, isDataOnly: isDataOnly);
    // Routing key used by TelegramService for this module's alerts.
    final alertKey = 'river_investigative';
    if (isSessionExpired) {
      // An expired session cannot send immediately — queue straight away.
      debugPrint("Session is expired; queuing River Investigative Telegram alert directly for $alertKey.");
      await _telegramService.queueMessage(alertKey, message, appSettings);
      return;
    }
    final sentNow = await _telegramService.sendAlertImmediately(alertKey, message, appSettings);
    if (!sentNow) {
      // Immediate delivery failed — fall back to the retry queue.
      await _telegramService.queueMessage(alertKey, message, appSettings);
    }
  } catch (e) {
    debugPrint("Failed to handle River Investigative Telegram alert: $e");
  }
}
/// Helper to generate the parameter limit alert section for Telegram (River Investigative).
Future<String> _getOutOfBoundsAlertSection(RiverInvesManualSamplingData data) async { // Updated model type

View File

@ -163,6 +163,22 @@ class RiverManualTriennialSamplingService {
_serialManager.dispose();
}
// --- START: NEW HELPER METHOD ---
/// Generates a unique timestamp ID from the sampling date and time.
String _generateTimestampId(String? date, String? time) {
  // Combine the two fields; nulls collapse to empty strings so the parse
  // failure path below handles missing values uniformly.
  final String dateTimeString = "${date ?? ''} ${time ?? ''}";
  try {
    // The model stores time as HH:mm (no seconds component).
    return DateFormat('yyyy-MM-dd HH:mm')
        .parse(dateTimeString)
        .millisecondsSinceEpoch
        .toString();
  } catch (e) {
    // Parsing failed — fall back to "now" so a usable ID is still returned.
    debugPrint("Could not parse '$dateTimeString' for timestamp ID, using current time. Error: $e");
    return DateTime.now().millisecondsSinceEpoch.toString();
  }
}
// --- END: NEW HELPER METHOD ---
Future<Map<String, dynamic>> submitData({
required RiverManualTriennialSamplingData data,
required List<Map<String, dynamic>>? appSettings,
@ -171,6 +187,13 @@ class RiverManualTriennialSamplingService {
}) async {
const String moduleName = 'river_triennial'; // Correct module name
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Generate the unique timestamp ID and assign it immediately.
// This ID will be used for filenames and as the primary submission ID.
final String timestampId = _generateTimestampId(data.samplingDate, data.samplingTime);
data.reportId = timestampId; // This is the primary ID now.
// --- END: MODIFIED TO USE TIMESTAMP ID ---
final connectivityResult = await Connectivity().checkConnectivity();
bool isOnline = !connectivityResult.contains(ConnectivityResult.none);
bool isOfflineSession = authProvider.isLoggedIn && (authProvider.profileData?['token']?.startsWith("offline-session-") ?? false);
@ -228,6 +251,11 @@ class RiverManualTriennialSamplingService {
String finalStatus = '';
bool isSessionKnownToBeExpired = false;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId; // Will hold the DB ID (e.g., 102) from the server
// data.reportId already contains the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
try {
// 1. Submit Form Data
apiDataResult = await _submissionApiService.submitPost(
@ -238,15 +266,20 @@ class RiverManualTriennialSamplingService {
if (apiDataResult['success'] == true) {
anyApiSuccess = true;
data.reportId = apiDataResult['data']?['r_tri_id']?.toString(); // Correct ID key
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// Store the server's database ID in a separate variable.
apiRecordId = apiDataResult['data']?['r_tri_id']?.toString(); // Correct ID key
// --- END: MODIFIED TO USE TIMESTAMP ID ---
if (data.reportId != null) {
if (apiRecordId != null) {
if (finalImageFiles.isNotEmpty) {
// 2. Submit Images
apiImageResult = await _submissionApiService.submitMultipart(
moduleName: moduleName,
endpoint: 'river/triennial/images', // Correct endpoint
fields: {'r_tri_id': data.reportId!}, // Correct field key
// --- START: MODIFIED TO USE TIMESTAMP ID ---
fields: {'r_tri_id': apiRecordId}, // Correct field key
// --- END: MODIFIED TO USE TIMESTAMP ID ---
files: finalImageFiles,
);
if (apiImageResult['success'] != true) {
@ -255,7 +288,9 @@ class RiverManualTriennialSamplingService {
}
} else {
anyApiSuccess = false;
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a record ID.';
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiDataResult['message'] = 'API Error: Submission succeeded but did not return a server record ID.';
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
// If apiDataResult['success'] is false, SubmissionApiService queued it.
@ -267,9 +302,11 @@ class RiverManualTriennialSamplingService {
apiDataResult = {'success': false, 'message': 'Session expired and re-login failed. API submission queued.'};
// Manually queue API calls
await _retryService.addApiToQueue(endpoint: 'river/triennial/sample', method: 'POST', body: data.toApiFormData());
if (finalImageFiles.isNotEmpty && data.reportId != null) {
if (finalImageFiles.isNotEmpty && apiRecordId != null) {
// Also queue images if data call might have partially succeeded before expiry
await _retryService.addApiToQueue(endpoint: 'river/triennial/images', method: 'POST_MULTIPART', fields: {'r_tri_id': data.reportId!}, files: finalImageFiles);
// --- START: MODIFIED TO USE TIMESTAMP ID ---
await _retryService.addApiToQueue(endpoint: 'river/triennial/images', method: 'POST_MULTIPART', fields: {'r_tri_id': apiRecordId}, files: finalImageFiles);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
}
}
@ -279,7 +316,9 @@ class RiverManualTriennialSamplingService {
if (isSessionKnownToBeExpired) {
debugPrint("Skipping FTP attempt due to known expired session. Manually queuing FTP tasks.");
// --- START: MODIFIED TO USE TIMESTAMP ID ---
final baseFileNameForQueue = _generateBaseFileName(data); // Use helper
// --- END: MODIFIED TO USE TIMESTAMP ID ---
// --- START FIX: Add ftpConfigId when queuing ---
// Get all potential FTP configs
@ -365,6 +404,9 @@ class RiverManualTriennialSamplingService {
apiResults: [apiDataResult, apiImageResult],
ftpStatuses: ftpResults['statuses'],
serverName: serverName,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
apiRecordId: apiRecordId, // Pass the server DB ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logDirectory: logDirectory,
);
@ -378,7 +420,9 @@ class RiverManualTriennialSamplingService {
'status': finalStatus,
'success': overallSuccess,
'message': finalMessage,
'reportId': data.reportId
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'reportId': data.reportId // This is now the timestamp ID
// --- END: MODIFIED TO USE TIMESTAMP ID ---
};
}
@ -408,7 +452,9 @@ class RiverManualTriennialSamplingService {
if (savedLogPath == null) {
const message = "Failed to save submission to local device storage.";
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: logDirectory);
// --- START: MODIFIED TO USE TIMESTAMP ID ---
await _logAndSave(data: data, status: 'Error', message: message, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: logDirectory);
// --- END: MODIFIED TO USE TIMESTAMP ID ---
return {'status': 'Error', 'success': false, 'message': message};
}
@ -423,17 +469,25 @@ class RiverManualTriennialSamplingService {
const successMessage = "Device offline. Submission has been saved locally and queued for automatic retry when connection is restored.";
// Log final queued state to central DB
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, logDirectory: savedLogPath);
// await _logAndSave(data: data, status: 'Queued', message: successMessage, apiResults: [], ftpStatuses: [], serverName: serverName, apiRecordId: null, logDirectory: savedLogPath);
return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': null};
return {'status': 'Queued', 'success': true, 'message': successMessage, 'reportId': data.reportId}; // Return timestamp ID
}
// --- START: MODIFIED _generateBaseFileName ---
/// Helper to generate the base filename for ZIP files.
String _generateBaseFileName(RiverManualTriennialSamplingData data) {
final stationCode = data.selectedStation?['sampling_station_code'] ?? 'UNKNOWN';
final fileTimestamp = "${data.samplingDate}_${data.samplingTime}".replaceAll(':', '-').replaceAll(' ', '_');
return "${stationCode}_$fileTimestamp";
// We now always use data.reportId, which we set as the timestamp.
if (data.reportId == null || data.reportId!.isEmpty) {
// This is a safety fallback, but should not happen if submitData is used.
debugPrint("Warning: reportId is null in _generateBaseFileName. Using current timestamp.");
return '${stationCode}_${DateTime.now().millisecondsSinceEpoch.toString()}';
}
return '${stationCode}_${data.reportId}';
}
// --- END: MODIFIED _generateBaseFileName ---
/// Generates data and image ZIP files and uploads them using SubmissionFtpService.
@ -446,7 +500,9 @@ class RiverManualTriennialSamplingService {
subModule: 'river_triennial_sampling', // Correct sub-module path
);
final folderName = data.reportId ?? baseFileName;
// --- START: MODIFIED folderName ---
final folderName = baseFileName; // Use the timestamp-based filename
// --- END: MODIFIED folderName ---
final Directory? localSubmissionDir = logDirectory != null ? Directory(p.join(logDirectory.path, folderName)) : null;
if (localSubmissionDir != null && !await localSubmissionDir.exists()) {
await localSubmissionDir.create(recursive: true);
@ -492,18 +548,25 @@ class RiverManualTriennialSamplingService {
required List<Map<String, dynamic>> apiResults,
required List<Map<String, dynamic>> ftpStatuses,
required String serverName,
// --- START: MODIFIED TO USE TIMESTAMP ID ---
String? apiRecordId, // This is the server DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
String? logDirectory,
}) async {
data.submissionStatus = status;
data.submissionMessage = message;
final baseFileName = _generateBaseFileName(data); // Use helper
final baseFileName = _generateBaseFileName(data); // This now uses the timestamp ID
// Prepare log data map using toMap()
final Map<String, dynamic> logMapData = data.toMap();
// Add submission metadata
logMapData['submissionStatus'] = status;
logMapData['submissionMessage'] = message;
logMapData['reportId'] = data.reportId;
// --- START: MODIFIED TO USE TIMESTAMP ID ---
// data.reportId (which is the timestamp) is already in the map from toMap()
// We add the server's DB ID separately for the log file
logMapData['apiRecordId'] = apiRecordId; // (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
logMapData['serverConfigName'] = serverName;
logMapData['api_status'] = jsonEncode(apiResults.where((r) => r.isNotEmpty).toList());
logMapData['ftp_status'] = jsonEncode(ftpStatuses);
@ -521,12 +584,14 @@ class RiverManualTriennialSamplingService {
// Save to central DB log
final imagePaths = data.toApiImageFiles().values.whereType<File>().map((f) => f.path).toList();
final centralLogData = {
'submission_id': data.reportId ?? baseFileName, // Use helper result
// --- START: MODIFIED TO USE TIMESTAMP ID ---
'submission_id': data.reportId ?? baseFileName, // This is the timestamp ID
'module': 'river',
'type': data.samplingType ?? 'Triennial', // Correct type
'status': status,
'message': message,
'report_id': data.reportId,
'report_id': apiRecordId, // This is the server DB ID (e.g., 102)
// --- END: MODIFIED TO USE TIMESTAMP ID ---
'created_at': DateTime.now().toIso8601String(),
'form_data': jsonEncode(logMapData), // Log the comprehensive map
'image_data': jsonEncode(imagePaths),

View File

@ -1,4 +1,5 @@
import 'dart:io';
import 'dart:convert'; // Added to ensure correct UTF-8 encoding
import 'package:flutter/foundation.dart';
import 'package:archive/archive_io.dart';
import 'package:path_provider/path_provider.dart';
@ -30,8 +31,15 @@ class ZippingService {
final fileName = entry.key;
final jsonContent = entry.value;
// --- MODIFIED: The codeUnits property is already a List<int>. No need to wrap it in a Stream. ---
final archiveFile = ArchiveFile(fileName, jsonContent.length, jsonContent.codeUnits);
// --- MODIFIED: Ensure UTF-8 encoding ---
// 1. Encode the string content into UTF-8 bytes
final utf8Bytes = utf8.encode(jsonContent);
// 2. Use the UTF-8 bytes and their correct length for the archive
// (This replaces the original: jsonContent.length, jsonContent.codeUnits)
final archiveFile = ArchiveFile(fileName, utf8Bytes.length, utf8Bytes);
// --- END MODIFICATION ---
encoder.addArchiveFile(archiveFile);
debugPrint("Added $fileName to data ZIP.");