// environment_monitoring_app/lib/services/retry_service.dart
// lib/services/retry_service.dart
import 'dart:convert';
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:path/path.dart' as p;
import 'package:environment_monitoring_app/models/in_situ_sampling_data.dart';
import 'package:environment_monitoring_app/services/marine_in_situ_sampling_service.dart';
import 'package:environment_monitoring_app/models/river_in_situ_sampling_data.dart';
import 'package:environment_monitoring_app/services/river_in_situ_sampling_service.dart';
// *** ADDED: Import River Investigative Model and Service ***
import 'package:environment_monitoring_app/models/river_inves_manual_sampling_data.dart';
import 'package:environment_monitoring_app/services/river_investigative_sampling_service.dart';
// *** END ADDED ***
import 'package:environment_monitoring_app/models/marine_inves_manual_sampling_data.dart';
import 'package:environment_monitoring_app/services/marine_investigative_sampling_service.dart';
import 'package:environment_monitoring_app/models/tarball_data.dart';
import 'package:environment_monitoring_app/services/marine_tarball_sampling_service.dart';
import 'package:environment_monitoring_app/services/api_service.dart';
import 'package:environment_monitoring_app/services/base_api_service.dart';
import 'package:environment_monitoring_app/services/ftp_service.dart';
import 'package:environment_monitoring_app/services/server_config_service.dart';
import 'package:environment_monitoring_app/auth_provider.dart';
/// A dedicated service to manage the queue of failed API, FTP, and complex submission tasks.
class RetryService {
  final DatabaseHelper _dbHelper = DatabaseHelper();
  final BaseApiService _baseApiService = BaseApiService();
  final FtpService _ftpService = FtpService();
  final ServerConfigService _serverConfigService = ServerConfigService();

  // Re-entrancy guard for [processRetryQueue]. Always reset in a `finally`
  // so a thrown error can never leave the queue permanently locked.
  bool _isProcessing = false;

  // Services needed to replay complex submission tasks. They are injected
  // via [initialize] because this service is constructed before they exist.
  MarineInSituSamplingService? _marineInSituService;
  RiverInSituSamplingService? _riverInSituService;
  MarineInvestigativeSamplingService? _marineInvestigativeService;
  MarineTarballSamplingService? _marineTarballService;
  RiverInvestigativeSamplingService? _riverInvestigativeService;
  AuthProvider? _authProvider;

  /// Injects the sampling services and [AuthProvider] used when replaying
  /// queued complex submissions.
  ///
  /// Must be called once before [processRetryQueue] / [retryTask]; complex
  /// task handlers fail (and stay queued) until their service is set.
  void initialize({
    required MarineInSituSamplingService marineInSituService,
    required RiverInSituSamplingService riverInSituService,
    required MarineInvestigativeSamplingService marineInvestigativeService,
    required RiverInvestigativeSamplingService riverInvestigativeService,
    required MarineTarballSamplingService marineTarballService,
    required AuthProvider authProvider,
  }) {
    _marineInSituService = marineInSituService;
    _riverInSituService = riverInSituService;
    _marineInvestigativeService = marineInvestigativeService;
    _riverInvestigativeService = riverInvestigativeService;
    _marineTarballService = marineTarballService;
    _authProvider = authProvider;
  }

  /// Adds a generic, complex task to the queue, to be handled by a
  /// background processor.
  Future<void> queueTask({
    required String type,
    required Map<String, dynamic> payload,
  }) async {
    await _dbHelper.queueFailedRequest({
      'type': type,
      'endpoint_or_path': 'N/A', // Not applicable for complex tasks initially
      'payload': jsonEncode(payload),
      'timestamp': DateTime.now().toIso8601String(),
      'status': 'pending',
    });
    debugPrint("Task of type '$type' has been queued for background processing.");
  }

  /// Adds a failed API request to the local database queue.
  ///
  /// [files] values are stored as file paths (a [File] object cannot be
  /// JSON-serialized) and reconstructed when the task is retried.
  Future<void> addApiToQueue({
    required String endpoint,
    required String method,
    Map<String, dynamic>? body,
    Map<String, String>? fields,
    Map<String, File>? files,
  }) async {
    // Convert File objects to paths for JSON serialization.
    final serializableFiles =
        files?.map((key, value) => MapEntry(key, value.path));
    final payload = {
      'method': method,
      'body': body,
      'fields': fields,
      'files': serializableFiles, // Store paths instead of File objects
    };
    await _dbHelper.queueFailedRequest({
      'type': 'api',
      'endpoint_or_path': endpoint,
      'payload': jsonEncode(payload),
      'timestamp': DateTime.now().toIso8601String(),
      'status': 'pending',
    });
    debugPrint("API request for endpoint '$endpoint' has been queued for retry.");
  }

  /// Adds a failed FTP upload to the local database queue.
  ///
  /// [ftpConfigId] identifies which configured destination the upload
  /// failed against, so the retry targets the same server.
  Future<void> addFtpToQueue({
    required String localFilePath,
    required String remotePath,
    required int ftpConfigId,
  }) async {
    final payload = {
      'localFilePath': localFilePath,
      'ftpConfigId': ftpConfigId, // Store the specific config ID
    };
    await _dbHelper.queueFailedRequest({
      'type': 'ftp',
      'endpoint_or_path': remotePath,
      'payload': jsonEncode(payload),
      'timestamp': DateTime.now().toIso8601String(),
      'status': 'pending',
    });
    debugPrint("FTP upload for file '$localFilePath' to config ID $ftpConfigId has been queued for retry.");
  }

  /// Retrieves all tasks currently in the 'pending' state from the queue.
  Future<List<Map<String, dynamic>>> getPendingTasks() {
    return _dbHelper.getPendingRequests();
  }

  /// Processes the entire queue of pending tasks, one at a time.
  ///
  /// No-ops if a run is already in progress, the queue is empty, or there
  /// is no internet connection. FIX: the `_isProcessing` flag is now reset
  /// in a `finally` block — previously an exception thrown while iterating
  /// would leave it stuck at `true`, permanently blocking all future runs.
  Future<void> processRetryQueue() async {
    if (_isProcessing) {
      debugPrint("[RetryService] ⏳ Queue is already being processed. Skipping.");
      return;
    }
    _isProcessing = true;
    debugPrint("[RetryService] ▶️ Starting to process main retry queue...");
    try {
      final pendingTasks = await getPendingTasks();
      if (pendingTasks.isEmpty) {
        debugPrint("[RetryService] ⏹️ Queue is empty. Nothing to process.");
        return;
      }
      // Check internet connection *before* processing.
      final authProvider = _authProvider;
      if (authProvider == null || !await authProvider.isConnected()) {
        debugPrint("[RetryService] ❌ No internet connection. Aborting queue processing.");
        return;
      }
      debugPrint("[RetryService] 🔎 Found ${pendingTasks.length} pending tasks.");
      // Process tasks one by one.
      for (final task in pendingTasks) {
        // Safety check in case a task is deleted mid-processing by another call.
        if (await _dbHelper.getRequestById(task['id'] as int) != null) {
          await retryTask(task['id'] as int);
        }
        // Optional: Add a small delay between tasks if needed.
        // await Future.delayed(Duration(seconds: 1));
      }
      debugPrint("[RetryService] ⏹️ Finished processing retry queue.");
    } finally {
      _isProcessing = false;
    }
  }

  /// Attempts to re-execute a single failed task from the queue.
  ///
  /// Returns `true` on success, `false` on failure. Tasks that can never
  /// succeed (undecodable payload, missing source files, unknown type,
  /// missing config ID) are deleted from the queue; transient failures
  /// (network, session expiry, service not yet initialized) remain queued
  /// for a later attempt.
  Future<bool> retryTask(int taskId) async {
    final task = await _dbHelper.getRequestById(taskId);
    if (task == null) {
      debugPrint("Retry failed: Task with ID $taskId not found in the queue (might have been processed already).");
      return false; // Task doesn't exist or was processed elsewhere
    }
    bool success = false;
    Map<String, dynamic> payload; // Needed after the try for cleanup logic
    final String taskType = task['type'] as String; // Get type early for logging
    try {
      payload = jsonDecode(task['payload'] as String) as Map<String, dynamic>;
    } catch (e) {
      debugPrint("Error decoding payload for task $taskId (Type: $taskType): $e. Removing invalid task.");
      await _dbHelper.deleteRequestFromQueue(taskId);
      return false; // Cannot process without valid payload
    }
    try {
      // Copy to a local so the null check promotes (fields don't promote);
      // this removes the `!` assertions the original needed below.
      final authProvider = _authProvider;
      if (authProvider == null) {
        debugPrint("RetryService has not been initialized. Cannot process task $taskId.");
        return false;
      }
      // --- Complex Task Handlers ---
      if (taskType == 'insitu_submission') {
        debugPrint("Retrying complex task 'insitu_submission' with ID $taskId.");
        final service = _marineInSituService;
        if (service == null) {
          debugPrint("Retry failed: MarineInSituSamplingService not initialized.");
          return false;
        }
        // For marine in-situ, localLogPath is the log *directory*.
        final String logDirectoryPath = payload['localLogPath'] as String;
        final jsonFilePath = p.join(logDirectoryPath, 'data.json');
        final file = File(jsonFilePath);
        if (!await file.exists()) {
          debugPrint("Retry failed: Source log file no longer exists at $jsonFilePath");
          await _dbHelper.deleteRequestFromQueue(taskId); // Remove invalid task
          return false;
        }
        final content = await file.readAsString();
        final jsonData = jsonDecode(content) as Map<String, dynamic>;
        final dataToResubmit = InSituSamplingData.fromJson(jsonData);
        // Re-run the original submission logic, passing the log directory.
        final result = await service.submitInSituSample(
          data: dataToResubmit,
          appSettings: authProvider.appSettings, // Get current settings
          authProvider: authProvider,
          logDirectory: logDirectoryPath, // Pass directory to update log
        );
        // FIX: treat anything other than an explicit `true` as failure
        // instead of letting a null 'success' key throw a TypeError.
        success = result['success'] == true;
      } else if (taskType == 'river_insitu_submission') {
        debugPrint("Retrying complex task 'river_insitu_submission' with ID $taskId.");
        final service = _riverInSituService;
        if (service == null) {
          debugPrint("Retry failed: RiverInSituSamplingService not initialized.");
          return false;
        }
        // For river tasks, localLogPath points at the JSON *file* itself.
        final String jsonFilePath = payload['localLogPath'] as String;
        final file = File(jsonFilePath);
        if (!await file.exists()) {
          debugPrint("Retry failed: Source log file no longer exists at $jsonFilePath");
          await _dbHelper.deleteRequestFromQueue(taskId);
          return false;
        }
        final String logDirectoryPath = p.dirname(jsonFilePath);
        final content = await file.readAsString();
        final jsonData = jsonDecode(content) as Map<String, dynamic>;
        final dataToResubmit = RiverInSituSamplingData.fromJson(jsonData);
        final result = await service.submitData(
          data: dataToResubmit,
          appSettings: authProvider.appSettings,
          authProvider: authProvider,
          logDirectory: logDirectoryPath,
        );
        success = result['success'] == true;
      } else if (taskType == 'river_investigative_submission') {
        debugPrint("Retrying complex task 'river_investigative_submission' with ID $taskId.");
        final service = _riverInvestigativeService;
        if (service == null) {
          debugPrint("Retry failed: RiverInvestigativeSamplingService not initialized.");
          return false;
        }
        final String jsonFilePath = payload['localLogPath'] as String;
        final file = File(jsonFilePath);
        if (!await file.exists()) {
          debugPrint("Retry failed: Source log file no longer exists at $jsonFilePath");
          await _dbHelper.deleteRequestFromQueue(taskId);
          return false;
        }
        final String logDirectoryPath = p.dirname(jsonFilePath);
        final content = await file.readAsString();
        final jsonData = jsonDecode(content) as Map<String, dynamic>;
        // Use the Investigative data model, not the in-situ one.
        final dataToResubmit = RiverInvesManualSamplingData.fromJson(jsonData);
        final result = await service.submitData(
          data: dataToResubmit,
          appSettings: authProvider.appSettings,
          authProvider: authProvider,
          logDirectory: logDirectoryPath,
        );
        success = result['success'] == true;
      } else if (taskType == 'investigative_submission') {
        debugPrint("Retrying complex task 'investigative_submission' with ID $taskId.");
        final service = _marineInvestigativeService;
        if (service == null) {
          debugPrint("Retry failed: MarineInvestigativeSamplingService not initialized.");
          return false;
        }
        // Marine investigative logs also use a directory + data.json layout.
        final String logDirectoryPath = payload['localLogPath'] as String;
        final jsonFilePath = p.join(logDirectoryPath, 'data.json');
        final file = File(jsonFilePath);
        if (!await file.exists()) {
          debugPrint("Retry failed: Source log file no longer exists at $jsonFilePath");
          await _dbHelper.deleteRequestFromQueue(taskId);
          return false;
        }
        final content = await file.readAsString();
        final jsonData = jsonDecode(content) as Map<String, dynamic>;
        final dataToResubmit = MarineInvesManualSamplingData.fromJson(jsonData);
        final result = await service.submitInvestigativeSample(
          data: dataToResubmit,
          appSettings: authProvider.appSettings,
          authProvider: authProvider,
          logDirectory: logDirectoryPath,
        );
        success = result['success'] == true;
      } else if (taskType == 'tarball_submission') {
        debugPrint("Retrying complex task 'tarball_submission' with ID $taskId.");
        final service = _marineTarballService;
        if (service == null) {
          debugPrint("Retry failed: MarineTarballSamplingService not initialized.");
          return false;
        }
        final String logDirectoryPath = payload['localLogPath'] as String;
        final jsonFilePath = p.join(logDirectoryPath, 'data.json');
        final file = File(jsonFilePath);
        if (!await file.exists()) {
          debugPrint("Retry failed: Source log file no longer exists at $jsonFilePath");
          await _dbHelper.deleteRequestFromQueue(taskId);
          return false;
        }
        final content = await file.readAsString();
        final jsonData = jsonDecode(content) as Map<String, dynamic>;
        // Recreate File objects from stored paths (null/empty -> no image).
        File? fileFromJson(dynamic path) =>
            (path is String && path.isNotEmpty) ? File(path) : null;
        // TarballSamplingData has no fromJson; reconstruct field by field.
        final dataToResubmit = TarballSamplingData()
          ..firstSampler = jsonData['firstSampler']
          ..firstSamplerUserId = jsonData['firstSamplerUserId']
          ..secondSampler = jsonData['secondSampler']
          ..samplingDate = jsonData['samplingDate']
          ..samplingTime = jsonData['samplingTime']
          ..selectedStateName = jsonData['selectedStateName']
          ..selectedCategoryName = jsonData['selectedCategoryName']
          ..selectedStation = jsonData['selectedStation']
          ..stationLatitude = jsonData['stationLatitude']
          ..stationLongitude = jsonData['stationLongitude']
          ..currentLatitude = jsonData['currentLatitude']
          ..currentLongitude = jsonData['currentLongitude']
          // Safe numeric casts: JSON may hold int where double is expected.
          ..distanceDifference = jsonData['distanceDifference'] is num
              ? (jsonData['distanceDifference'] as num).toDouble()
              : null
          ..distanceDifferenceRemarks = jsonData['distanceDifferenceRemarks']
          ..classificationId = jsonData['classificationId'] is num
              ? (jsonData['classificationId'] as num).toInt()
              : null
          ..selectedClassification = jsonData['selectedClassification']
          ..leftCoastalViewImage = fileFromJson(jsonData['leftCoastalViewImage'])
          ..rightCoastalViewImage = fileFromJson(jsonData['rightCoastalViewImage'])
          ..verticalLinesImage = fileFromJson(jsonData['verticalLinesImage'])
          ..horizontalLineImage = fileFromJson(jsonData['horizontalLineImage'])
          ..optionalImage1 = fileFromJson(jsonData['optionalImage1'])
          ..optionalRemark1 = jsonData['optionalRemark1']
          ..optionalImage2 = fileFromJson(jsonData['optionalImage2'])
          ..optionalRemark2 = jsonData['optionalRemark2']
          ..optionalImage3 = fileFromJson(jsonData['optionalImage3'])
          ..optionalRemark3 = jsonData['optionalRemark3']
          ..optionalImage4 = fileFromJson(jsonData['optionalImage4'])
          ..optionalRemark4 = jsonData['optionalRemark4']
          ..reportId = jsonData['reportId'] // Preserve reportId if it exists
          ..submissionStatus = jsonData['submissionStatus'] // Preserve status info
          ..submissionMessage = jsonData['submissionMessage'];
        debugPrint("Retrying Tarball submission...");
        final result = await service.submitTarballSample(
          data: dataToResubmit,
          appSettings: authProvider.appSettings,
          context: null, // No BuildContext available during background retry
          logDirectory: logDirectoryPath, // Directory path for log update
        );
        success = result['success'] == true;
        // --- Simple Task Handlers ---
      } else if (taskType == 'api') {
        final endpoint = task['endpoint_or_path'] as String;
        final method = payload['method'] as String;
        // Resolve against the *currently* active server, not the one that
        // was configured when the request originally failed.
        final baseUrl = await _serverConfigService.getActiveApiUrl();
        debugPrint("Retrying API task $taskId: $method to $baseUrl/$endpoint");
        Map<String, dynamic> result;
        if (method == 'POST_MULTIPART') {
          final Map<String, String> fields =
              Map<String, String>.from(payload['fields'] ?? {});
          // Recreate File objects from paths stored in the payload.
          final Map<String, File> files =
              (payload['files'] as Map<String, dynamic>?)
                      ?.map((key, value) => MapEntry(key, File(value as String))) ??
                  {};
          // Verify every file still exists before attempting the upload.
          bool allFilesExist = true;
          final List<String> missingFiles = [];
          for (final entry in files.entries) {
            if (!await entry.value.exists()) {
              debugPrint("Retry failed for API task $taskId: File ${entry.value.path} (key: ${entry.key}) no longer exists.");
              allFilesExist = false;
              missingFiles.add(entry.key);
            }
          }
          // If any file is missing, the task can never succeed — drop it.
          if (!allFilesExist) {
            debugPrint("API Multipart retry failed for task $taskId because files are missing: ${missingFiles.join(', ')}. Removing task.");
            await _dbHelper.deleteRequestFromQueue(taskId);
            return false;
          }
          result = await _baseApiService.postMultipart(
              baseUrl: baseUrl, endpoint: endpoint, fields: fields, files: files);
        } else {
          // Standard POST.
          final Map<String, dynamic> body =
              Map<String, dynamic>.from(payload['body'] ?? {});
          result = await _baseApiService.post(baseUrl, endpoint, body);
        }
        success = result['success'] == true;
      } else if (taskType == 'ftp') {
        final remotePath = task['endpoint_or_path'] as String;
        final localFile = File(payload['localFilePath'] as String);
        final int? ftpConfigId = payload['ftpConfigId'] as int?;
        debugPrint("Retrying FTP task $taskId: Uploading ${localFile.path} to $remotePath using config ID $ftpConfigId");
        if (ftpConfigId == null) {
          debugPrint("Retry failed for FTP task $taskId: Missing FTP configuration ID in payload.");
          await _dbHelper.deleteRequestFromQueue(taskId); // Unrecoverable
          return false;
        }
        if (await localFile.exists()) {
          final ftpConfigs = await _dbHelper.loadFtpConfigs() ?? [];
          final config = ftpConfigs.firstWhere(
              (c) => c['ftp_config_id'] == ftpConfigId,
              orElse: () => <String, dynamic>{});
          if (config.isEmpty) {
            // Config may reappear later (e.g. after a settings sync), so
            // keep the task queued rather than deleting it.
            debugPrint("Retry failed for FTP task $taskId: FTP configuration with ID $ftpConfigId not found.");
            return false;
          }
          // Attempt upload using the specific config.
          final result = await _ftpService.uploadFile(
              config: config, fileToUpload: localFile, remotePath: remotePath);
          success = result['success'] == true;
        } else {
          debugPrint("Retry failed for FTP task $taskId: Source file no longer exists at ${localFile.path}");
          await _dbHelper.deleteRequestFromQueue(taskId); // File is gone
          return false;
        }
      } else {
        debugPrint("Unknown task type '$taskType' for task ID $taskId. Cannot retry. Removing task.");
        await _dbHelper.deleteRequestFromQueue(taskId);
      }
    } on SessionExpiredException catch (e) {
      // Session expiry is transient: fail this attempt, keep the task.
      debugPrint("Session expired during retry attempt for task $taskId (Type: $taskType): $e. Task remains in queue.");
      success = false;
    } catch (e, stacktrace) {
      debugPrint("A critical error occurred while retrying task $taskId (Type: $taskType): $e");
      debugPrint("Stacktrace: $stacktrace"); // For detailed debugging
      success = false;
    }
    // Post-processing: remove successful tasks and clean up temp artifacts.
    if (success) {
      debugPrint("Task $taskId (Type: $taskType) completed successfully. Removing from queue.");
      await _dbHelper.deleteRequestFromQueue(taskId);
      // Complex tasks may leave temporary ZIP files next to their log.
      if (taskType.endsWith('_submission') && payload['localLogPath'] != null) {
        String pathToCheck = payload['localLogPath'];
        // Older marine in-situ logs store a directory path directly; river
        // logs store the data.json path, whose parent we clean instead.
        bool isDirectory = await Directory(pathToCheck).exists();
        if (!isDirectory && pathToCheck.endsWith('.json')) {
          pathToCheck = p.dirname(pathToCheck);
          isDirectory = true;
        }
        await _cleanUpTemporaryZipFiles(pathToCheck, isDirectory: isDirectory);
      }
      // An FTP task's local ZIP is no longer needed after upload.
      if (taskType == 'ftp' &&
          payload['localFilePath'] != null &&
          (payload['localFilePath'] as String).endsWith('.zip')) {
        await _cleanUpTemporaryZipFiles(payload['localFilePath'],
            isDirectory: false);
      }
    } else {
      debugPrint("Retry attempt for task $taskId (Type: $taskType) failed. It will remain in the queue.");
      // Optional: implement a retry limit here — after N failures, mark the
      // task 'failed' instead of 'pending'.
      // e.g. await _dbHelper.updateTaskStatus(taskId, 'failed');
    }
    return success;
  }

  /// Deletes temporary `.zip` artifacts left behind by a successfully
  /// retried task.
  ///
  /// When [isDirectory] is true, [path] is a log directory and every `.zip`
  /// file directly inside it is removed (other files such as `data.json`
  /// are preserved). Otherwise [path] must itself be a `.zip` file path.
  /// Errors are logged and swallowed — cleanup is best-effort.
  ///
  /// FIX: returns `Future<void>` instead of the original `void ... async`
  /// (avoid_void_async), so callers can await completion and errors are
  /// not silently dropped mid-flight.
  Future<void> _cleanUpTemporaryZipFiles(String path,
      {required bool isDirectory}) async {
    try {
      if (isDirectory) {
        final dir = Directory(path);
        if (await dir.exists()) {
          for (final entity in dir.listSync()) {
            // Delete only ZIP files within the log directory.
            if (entity is File && entity.path.endsWith('.zip')) {
              debugPrint("Deleting temporary zip file from directory: ${entity.path}");
              await entity.delete();
            }
          }
          // Deliberately NOT deleting the directory itself: data.json and
          // other log files may still be needed.
        } else {
          debugPrint("Log directory not found for cleanup: $path");
        }
      } else {
        // A specific file path (e.g. from an FTP task).
        final file = File(path);
        if (await file.exists() && path.endsWith('.zip')) {
          debugPrint("Deleting temporary zip file: ${file.path}");
          await file.delete();
        } else if (!path.endsWith('.zip')) {
          debugPrint("Skipping cleanup for non-zip file path: $path");
        } else {
          debugPrint("Temporary zip file not found for cleanup: $path");
        }
      }
    } catch (e) {
      debugPrint("Error cleaning up temporary zip file(s) for path $path: $e");
    }
  }
} // End of RetryService class