865 lines
32 KiB
Dart
865 lines
32 KiB
Dart
// lib/services/local_storage_service.dart
|
|
|
|
import 'dart:io';
|
|
import 'dart:convert';
|
|
import 'package:flutter/foundation.dart';
|
|
import 'package:path_provider/path_provider.dart';
|
|
import 'package:permission_handler/permission_handler.dart';
|
|
import 'package:path/path.dart' as p;
|
|
import 'package:dio/dio.dart';
|
|
import 'package:geolocator/geolocator.dart';
|
|
|
|
import '../models/air_installation_data.dart';
|
|
import '../models/air_collection_data.dart';
|
|
import '../models/tarball_data.dart';
|
|
import '../models/in_situ_sampling_data.dart';
|
|
import '../models/marine_manual_npe_report_data.dart';
|
|
import '../models/river_in_situ_sampling_data.dart';
|
|
import '../models/river_manual_triennial_sampling_data.dart';
|
|
|
|
class LocalStorageService {
|
|
|
|
// =======================================================================
|
|
// Part 1: Public Storage Setup
|
|
// =======================================================================
|
|
|
|
/// Requests the Android "manage external storage" permission.
///
/// Returns whether the permission ended up granted after the prompt.
Future<bool> _requestPermissions() async {
  final status = await Permission.manageExternalStorage.request();
  return status.isGranted;
}
|
|
|
|
/// Resolves the public `MMSV4/<serverName>` directory on shared storage,
/// creating it on first use.
///
/// Returns null when the storage permission is denied or when no external
/// storage directory is available; a debug message is printed in the
/// permission-denied case.
Future<Directory?> _getPublicMMSV4Directory({required String serverName}) async {
  if (await _requestPermissions()) {
    final Directory? externalDir = await getExternalStorageDirectory();
    if (externalDir != null) {
      // Strip the app-private suffix (everything from '/Android/' onwards)
      // to reach the public storage root. NOTE(review): this assumes an
      // Android-style path containing '/Android/'; on other platforms the
      // split leaves the path unchanged — confirm intended behavior there.
      final publicRootPath = externalDir.path.split('/Android/')[0];
      final mmsv4Dir = Directory(p.join(publicRootPath, 'MMSV4', serverName));
      if (!await mmsv4Dir.exists()) {
        await mmsv4Dir.create(recursive: true);
      }
      return mmsv4Dir;
    }
  }
  debugPrint("LocalStorageService: Manage External Storage permission was not granted.");
  return null;
}
|
|
|
|
/// Resolves (and lazily creates) `<MMSV4>/<serverName>/<module>/<subModule>`.
///
/// Returns null when the public MMSV4 root cannot be obtained (for example
/// because the storage permission was denied).
Future<Directory?> getLogDirectory({required String serverName, required String module, required String subModule}) async {
  final root = await _getPublicMMSV4Directory(serverName: serverName);
  if (root == null) return null;

  final target = Directory(p.join(root.path, module, subModule));
  if (!await target.exists()) {
    await target.create(recursive: true);
  }
  return target;
}
|
|
|
|
// =======================================================================
|
|
// Part 2: Air Manual Sampling Methods (LOGGING RESTORED)
|
|
// =======================================================================
|
|
|
|
/// Resolves (and lazily creates) the air manual sampling base folder,
/// `<MMSV4>/<serverName>/air/air_manual_sampling`.
///
/// Returns null when the public MMSV4 root is unavailable.
Future<Directory?> _getAirManualBaseDir({required String serverName}) async {
  final root = await _getPublicMMSV4Directory(serverName: serverName);
  if (root == null) return null;

  final airDir = Directory(p.join(root.path, 'air', 'air_manual_sampling'));
  if (!await airDir.exists()) {
    await airDir.create(recursive: true);
  }
  return airDir;
}
|
|
|
|
/// Saves one air manual sampling event (JSON plus its images) under
/// `<base>/air/air_manual_sampling/<refID>/`.
///
/// Image [File] values found under the known installation and collection
/// keys are copied into the event folder and recorded as `<key>Path`
/// strings; every remaining raw [File] value is stripped before the map is
/// JSON-encoded so [jsonEncode] cannot throw on it.
///
/// Returns the event directory path on success, or null when storage is
/// unavailable or an I/O error occurs.
Future<String?> saveAirSamplingRecord(Map<String, dynamic> data, String refID, {required String serverName}) async {
  final baseDir = await _getAirManualBaseDir(serverName: serverName);
  if (baseDir == null) {
    debugPrint("Could not get public storage directory for Air Manual. Check permissions.");
    return null;
  }

  try {
    final eventDir = Directory(p.join(baseDir.path, refID));
    if (!await eventDir.exists()) {
      await eventDir.create(recursive: true);
    }

    // Copies an image into the event folder and returns the new path.
    // A file already inside the event folder is left where it is so a
    // re-save does not copy a file onto itself.
    Future<String?> copyImageToLocal(dynamic imageFile) async {
      if (imageFile is! File) return null;
      try {
        if (p.dirname(imageFile.path) == eventDir.path) {
          return imageFile.path;
        }
        final String fileName = p.basename(imageFile.path);
        final File newFile = await imageFile.copy(p.join(eventDir.path, fileName));
        return newFile.path;
      } catch (e) {
        debugPrint("Error copying file ${imageFile.path}: $e");
        return null;
      }
    }

    final Map<String, dynamic> serializableData = Map.from(data);
    serializableData['serverConfigName'] = serverName;

    // Installation-phase image slots expected in the top-level map.
    final installationImageKeys = ['imageFront', 'imageBack', 'imageLeft', 'imageRight', 'optionalImage1', 'optionalImage2', 'optionalImage3', 'optionalImage4'];

    for (final key in installationImageKeys) {
      if (serializableData.containsKey(key) && serializableData[key] is File) {
        final newPath = await copyImageToLocal(serializableData[key]);
        serializableData['${key}Path'] = newPath;
      }
    }

    // The optional collection phase carries its own image set in a nested map.
    if (serializableData['collectionData'] is Map) {
      final collectionMap = Map<String, dynamic>.from(serializableData['collectionData']);
      final collectionImageKeys = ['imageFront', 'imageBack', 'imageLeft', 'imageRight', 'imageChart', 'imageFilterPaper', 'optionalImage1', 'optionalImage2', 'optionalImage3', 'optionalImage4'];

      for (final key in collectionImageKeys) {
        if (collectionMap.containsKey(key) && collectionMap[key] is File) {
          final newPath = await copyImageToLocal(collectionMap[key]);
          collectionMap['${key}Path'] = newPath;
        }
      }
      serializableData['collectionData'] = collectionMap;
    }

    final Map<String, dynamic> finalData = Map.from(serializableData);

    // Recursively drop every File value so the map is JSON-serializable.
    // FIX: accept any Map — the original cast `value as Map<String, dynamic>`
    // would throw on nested maps with a different runtime type argument.
    void cleanMap(Map map) {
      map.removeWhere((key, value) => value is File);
      for (final value in map.values) {
        if (value is Map) cleanMap(value);
      }
    }

    cleanMap(finalData);

    final jsonFile = File(p.join(eventDir.path, 'data.json'));
    await jsonFile.writeAsString(jsonEncode(finalData));
    debugPrint("Air sampling log and images saved to: ${eventDir.path}");

    return eventDir.path;

  } catch (e, s) {
    debugPrint("Error saving air sampling log to local storage: $e");
    debugPrint("Stack trace: $s");
    return null;
  }
}
|
|
|
|
/// Collects every saved air manual sampling log across all server folders.
///
/// Scans `<MMSV4>/<server>/air/air_manual_sampling/<event>/data.json`,
/// decodes each file, and tags the resulting map with a runtime-only
/// `logDirectory` key pointing at its event folder. Returns an empty list
/// when shared storage is unavailable.
Future<List<Map<String, dynamic>>> getAllAirSamplingLogs() async {
  // serverName '' resolves the MMSV4 root itself so every server subfolder
  // beneath it can be enumerated.
  final mmsv4Root = await _getPublicMMSV4Directory(serverName: '');
  if (mmsv4Root == null || !await mmsv4Root.exists()) return [];

  final List<Map<String, dynamic>> allLogs = [];
  final serverDirs = mmsv4Root.listSync().whereType<Directory>();

  for (var serverDir in serverDirs) {
    final baseDir = Directory(p.join(serverDir.path, 'air', 'air_manual_sampling'));
    if (!await baseDir.exists()) continue;

    try {
      for (var entity in baseDir.listSync().whereType<Directory>()) {
        final jsonFile = File(p.join(entity.path, 'data.json'));
        if (!await jsonFile.exists()) continue;
        // FIX: decode each log in its own try/catch so a single corrupt
        // data.json no longer aborts the scan of the remaining events.
        try {
          final content = await jsonFile.readAsString();
          final data = jsonDecode(content) as Map<String, dynamic>;
          data['logDirectory'] = entity.path;
          allLogs.add(data);
        } catch (e) {
          debugPrint("Skipping corrupt air log ${jsonFile.path}: $e");
        }
      }
    } catch (e) {
      debugPrint("Error reading air logs from ${baseDir.path}: $e");
    }
  }
  return allLogs;
}
|
|
|
|
// =======================================================================
|
|
// Part 3: Tarball Specific Methods (LOGGING RESTORED)
|
|
// =======================================================================
|
|
|
|
/// Resolves (and lazily creates) the tarball sampling base folder,
/// `<MMSV4>/<serverName>/marine/marine_tarball_sampling`.
///
/// Returns null when the public MMSV4 root is unavailable.
Future<Directory?> _getTarballBaseDir({required String serverName}) async {
  final root = await _getPublicMMSV4Directory(serverName: serverName);
  if (root == null) return null;

  final tarballDir = Directory(p.join(root.path, 'marine', 'marine_tarball_sampling'));
  if (!await tarballDir.exists()) {
    await tarballDir.create(recursive: true);
  }
  return tarballDir;
}
|
|
|
|
/// Saves one tarball sampling event (JSON plus its images) under
/// `<base>/marine/marine_tarball_sampling/<station>_<date>_<time>/`.
///
/// Images returned by [TarballSamplingData.toImageFiles] are copied into
/// the event folder and their local paths recorded in the JSON; a file
/// already inside the folder is referenced in place. A per-image failure
/// nulls that entry rather than failing the whole save.
///
/// Returns the event directory path, or null on permission/IO failure.
Future<String?> saveTarballSamplingData(TarballSamplingData data, {required String serverName}) async {
  final baseDir = await _getTarballBaseDir(serverName: serverName);
  if (baseDir == null) {
    debugPrint("Could not get public storage directory. Check permissions.");
    return null;
  }

  try {
    // Folder name is '<stationCode>_<date>_<time>'; ':' in the time is
    // replaced so the name stays filesystem-safe.
    final stationCode = data.selectedStation?['tbl_station_code'] ?? 'UNKNOWN_STATION';
    final timestamp = "${data.samplingDate}_${data.samplingTime?.replaceAll(':', '-')}";
    final eventFolderName = "${stationCode}_$timestamp";
    final eventDir = Directory(p.join(baseDir.path, eventFolderName));

    if (!await eventDir.exists()) {
      await eventDir.create(recursive: true);
    }

    // Merge form fields with submission metadata and selections that
    // toFormData() does not include.
    final Map<String, dynamic> jsonData = { ...data.toFormData(), 'submissionStatus': data.submissionStatus, 'submissionMessage': data.submissionMessage, 'reportId': data.reportId };
    jsonData['serverConfigName'] = serverName;
    jsonData['selectedStation'] = data.selectedStation;

    jsonData['selectedClassification'] = data.selectedClassification;
    jsonData['secondSampler'] = data.secondSampler;

    final imageFiles = data.toImageFiles();
    for (var entry in imageFiles.entries) {
      final File? imageFile = entry.value;
      if (imageFile != null && imageFile.path.isNotEmpty) {
        try {
          // Already in the event folder (e.g. on re-save): reference in place.
          if (p.dirname(imageFile.path) == eventDir.path) {
            jsonData[entry.key] = imageFile.path;
          } else {
            final String originalFileName = p.basename(imageFile.path);
            final File newFile = await imageFile.copy(p.join(eventDir.path, originalFileName));
            jsonData[entry.key] = newFile.path;
          }
        } catch (e) {
          debugPrint("Error processing image file ${imageFile.path}: $e");
          jsonData[entry.key] = null;
        }
      }
    }

    final jsonFile = File(p.join(eventDir.path, 'data.json'));
    await jsonFile.writeAsString(jsonEncode(jsonData));
    debugPrint("Tarball log saved to: ${jsonFile.path}");

    return eventDir.path;

  } catch (e) {
    debugPrint("Error saving tarball log to local storage: $e");
    return null;
  }
}
|
|
|
|
/// Collects every saved tarball sampling log across all server folders.
///
/// Scans `<MMSV4>/<server>/marine/marine_tarball_sampling/<event>/data.json`
/// and tags each decoded map with a runtime-only `logDirectory` key.
/// Returns an empty list when shared storage is unavailable.
Future<List<Map<String, dynamic>>> getAllTarballLogs() async {
  final mmsv4Root = await _getPublicMMSV4Directory(serverName: '');
  if (mmsv4Root == null || !await mmsv4Root.exists()) return [];

  final List<Map<String, dynamic>> allLogs = [];
  final serverDirs = mmsv4Root.listSync().whereType<Directory>();

  for (var serverDir in serverDirs) {
    final baseDir = Directory(p.join(serverDir.path, 'marine', 'marine_tarball_sampling'));
    if (!await baseDir.exists()) continue;
    try {
      for (var entity in baseDir.listSync().whereType<Directory>()) {
        final jsonFile = File(p.join(entity.path, 'data.json'));
        if (!await jsonFile.exists()) continue;
        // FIX: per-entry try/catch so one corrupt data.json no longer
        // aborts the scan of the remaining events for this server.
        try {
          final content = await jsonFile.readAsString();
          final data = jsonDecode(content) as Map<String, dynamic>;
          data['logDirectory'] = entity.path;
          allLogs.add(data);
        } catch (e) {
          debugPrint("Skipping corrupt tarball log ${jsonFile.path}: $e");
        }
      }
    } catch (e) {
      debugPrint("Error reading tarball logs from ${baseDir.path}: $e");
    }
  }
  return allLogs;
}
|
|
|
|
/// Persists an edited tarball log back to its `data.json` on disk.
///
/// [updatedLogData] must contain the `logDirectory` key injected by
/// [getAllTarballLogs]; without it there is no target file and the call is
/// a no-op. The transient UI flag `isResubmitting` is never persisted.
Future<void> updateTarballLog(Map<String, dynamic> updatedLogData) async {
  final logDir = updatedLogData['logDirectory'];
  if (logDir == null) {
    debugPrint("Cannot update log: logDirectory key is missing.");
    return;
  }

  try {
    final jsonFile = File(p.join(logDir, 'data.json'));
    if (await jsonFile.exists()) {
      // FIX: write a defensive copy so the caller's map is not mutated as
      // a side effect (the original removed 'isResubmitting' in place).
      final persistable = Map<String, dynamic>.from(updatedLogData)
        ..remove('isResubmitting');
      await jsonFile.writeAsString(jsonEncode(persistable));
      debugPrint("Log updated successfully at: ${jsonFile.path}");
    }
  } catch (e) {
    debugPrint("Error updating tarball log: $e");
  }
}
|
|
|
|
// =======================================================================
|
|
// Part 4: Marine In-Situ Specific Methods (LOGGING RESTORED)
|
|
// =======================================================================
|
|
|
|
/// Resolves (and lazily creates) the marine in-situ sampling base folder,
/// `<MMSV4>/<serverName>/marine/marine_in_situ_sampling`.
///
/// Returns null when the public MMSV4 root is unavailable.
Future<Directory?> getInSituBaseDir({required String serverName}) async {
  final root = await _getPublicMMSV4Directory(serverName: serverName);
  if (root == null) return null;

  final inSituDir = Directory(p.join(root.path, 'marine', 'marine_in_situ_sampling'));
  if (!await inSituDir.exists()) {
    await inSituDir.create(recursive: true);
  }
  return inSituDir;
}
|
|
|
|
/// Saves one marine in-situ sampling event (JSON plus its images) under
/// `<base>/marine/marine_in_situ_sampling/<station>_<date>_<time>/`.
///
/// Images returned by [InSituSamplingData.toApiImageFiles] are copied into
/// the event folder and their local paths recorded in the JSON; a file
/// already inside the folder is referenced in place. A per-image failure
/// nulls that entry rather than failing the whole save.
///
/// Returns the event directory path, or null on permission/IO failure.
Future<String?> saveInSituSamplingData(InSituSamplingData data, {required String serverName}) async {
  final baseDir = await getInSituBaseDir(serverName: serverName);
  if (baseDir == null) {
    debugPrint("Could not get public storage directory for In-Situ. Check permissions.");
    return null;
  }

  try {
    // Folder name is '<stationCode>_<date>_<time>'; ':' in the time is
    // replaced so the name stays filesystem-safe.
    final stationCode = data.selectedStation?['man_station_code'] ?? 'UNKNOWN_STATION';
    final timestamp = "${data.samplingDate}_${data.samplingTime?.replaceAll(':', '-')}";
    final eventFolderName = "${stationCode}_$timestamp";
    final eventDir = Directory(p.join(baseDir.path, eventFolderName));

    if (!await eventDir.exists()) {
      await eventDir.create(recursive: true);
    }

    // Augment the DB-shaped map with submission metadata for later review.
    final Map<String, dynamic> jsonData = data.toDbJson();
    jsonData['submissionStatus'] = data.submissionStatus;
    jsonData['submissionMessage'] = data.submissionMessage;

    jsonData['serverConfigName'] = serverName;

    final imageFiles = data.toApiImageFiles();
    for (var entry in imageFiles.entries) {
      final File? imageFile = entry.value;
      if (imageFile != null && imageFile.path.isNotEmpty) {
        try {
          // Already in the event folder (e.g. on re-save): reference in place.
          if (p.dirname(imageFile.path) == eventDir.path) {
            jsonData[entry.key] = imageFile.path;
          } else {
            final String originalFileName = p.basename(imageFile.path);
            final File newFile = await imageFile.copy(p.join(eventDir.path, originalFileName));
            jsonData[entry.key] = newFile.path;
          }
        } catch (e) {
          debugPrint("Error processing In-Situ image file ${imageFile.path}: $e");
          jsonData[entry.key] = null;
        }
      }
    }

    final jsonFile = File(p.join(eventDir.path, 'data.json'));
    await jsonFile.writeAsString(jsonEncode(jsonData));
    debugPrint("In-Situ log saved to: ${jsonFile.path}");

    return eventDir.path;

  } catch (e) {
    debugPrint("Error saving In-Situ log to local storage: $e");
    return null;
  }
}
|
|
|
|
/// Collects every saved marine in-situ log across all server folders.
///
/// Scans `<MMSV4>/<server>/marine/marine_in_situ_sampling/<event>/data.json`
/// and tags each decoded map with a runtime-only `logDirectory` key.
/// Returns an empty list when shared storage is unavailable.
Future<List<Map<String, dynamic>>> getAllInSituLogs() async {
  final mmsv4Root = await _getPublicMMSV4Directory(serverName: '');
  if (mmsv4Root == null || !await mmsv4Root.exists()) return [];

  final List<Map<String, dynamic>> allLogs = [];
  final serverDirs = mmsv4Root.listSync().whereType<Directory>();

  for (var serverDir in serverDirs) {
    final baseDir = Directory(p.join(serverDir.path, 'marine', 'marine_in_situ_sampling'));
    if (!await baseDir.exists()) continue;
    try {
      for (var entity in baseDir.listSync().whereType<Directory>()) {
        final jsonFile = File(p.join(entity.path, 'data.json'));
        if (!await jsonFile.exists()) continue;
        // FIX: per-entry try/catch so one corrupt data.json no longer
        // aborts the scan of the remaining events for this server.
        try {
          final content = await jsonFile.readAsString();
          final data = jsonDecode(content) as Map<String, dynamic>;
          data['logDirectory'] = entity.path;
          allLogs.add(data);
        } catch (e) {
          debugPrint("Skipping corrupt in-situ log ${jsonFile.path}: $e");
        }
      }
    } catch (e) {
      debugPrint("Error reading in-situ logs from ${baseDir.path}: $e");
    }
  }
  return allLogs;
}
|
|
|
|
/// Persists an edited marine in-situ log back to its `data.json` on disk.
///
/// [updatedLogData] must contain the `logDirectory` key injected by
/// [getAllInSituLogs]; without it the call is a no-op. The transient UI
/// flag `isResubmitting` is never persisted.
Future<void> updateInSituLog(Map<String, dynamic> updatedLogData) async {
  final logDir = updatedLogData['logDirectory'];
  if (logDir == null) {
    debugPrint("Cannot update log: logDirectory key is missing.");
    return;
  }

  try {
    final jsonFile = File(p.join(logDir, 'data.json'));
    if (await jsonFile.exists()) {
      // FIX: write a defensive copy so the caller's map is not mutated as
      // a side effect (the original removed 'isResubmitting' in place).
      final persistable = Map<String, dynamic>.from(updatedLogData)
        ..remove('isResubmitting');
      await jsonFile.writeAsString(jsonEncode(persistable));
      debugPrint("Log updated successfully at: ${jsonFile.path}");
    }
  } catch (e) {
    debugPrint("Error updating in-situ log: $e");
  }
}
|
|
|
|
/// Returns marine in-situ samples taken within [radiusKm] kilometres of
/// ([latitude], [longitude]) during the last [withinHours] hours, newest
/// first.
///
/// Logs that lack a parseable timestamp or coordinates are skipped, as is
/// any log whose deserialization throws. NOTE(review): the timestamp is
/// parsed via `DateTime.tryParse('<date> <time>')`, which requires the
/// stored strings to be ISO-8601-compatible — confirm against the model's
/// formatting.
Future<List<InSituSamplingData>> getRecentNearbySamples({
  required double latitude,
  required double longitude,
  required double radiusKm,
  required int withinHours,
}) async {
  final allLogs = await getAllInSituLogs();
  final cutoffDateTime = DateTime.now().subtract(Duration(hours: withinHours));
  final double radiusInMeters = radiusKm * 1000;

  // FIX: pair each qualifying sample with its already-parsed timestamp so
  // the final sort does not re-parse both date strings on every comparison
  // (the original comparator did, and silently treated parse failures as
  // "equal", yielding an inconsistent ordering).
  final List<MapEntry<DateTime, InSituSamplingData>> matches = [];

  for (var log in allLogs) {
    try {
      final sampleData = InSituSamplingData.fromJson(log);

      if (sampleData.samplingDate == null || sampleData.samplingTime == null) {
        continue;
      }
      final sampleDateTime = DateTime.tryParse('${sampleData.samplingDate} ${sampleData.samplingTime}');
      if (sampleDateTime == null || sampleDateTime.isBefore(cutoffDateTime)) {
        continue;
      }

      final sampleLat = double.tryParse(sampleData.currentLatitude ?? '');
      final sampleLon = double.tryParse(sampleData.currentLongitude ?? '');
      if (sampleLat == null || sampleLon == null) {
        continue;
      }

      final distanceInMeters = Geolocator.distanceBetween(
        latitude,
        longitude,
        sampleLat,
        sampleLon,
      );

      if (distanceInMeters <= radiusInMeters) {
        matches.add(MapEntry(sampleDateTime, sampleData));
      }
    } catch (e) {
      debugPrint("Error processing in-situ log for nearby search: $e");
    }
  }

  // Newest first.
  matches.sort((a, b) => b.key.compareTo(a.key));
  return matches.map((e) => e.value).toList();
}
|
|
|
|
// --- ADDED: Part 4.5: Marine NPE Report Specific Methods ---
|
|
|
|
/// Resolves (and lazily creates) the NPE report base folder,
/// `<MMSV4>/<serverName>/marine/marine_npe_report`.
///
/// Returns null when the public MMSV4 root is unavailable.
Future<Directory?> _getNpeBaseDir({required String serverName}) async {
  final root = await _getPublicMMSV4Directory(serverName: serverName);
  if (root == null) return null;

  final npeDir = Directory(p.join(root.path, 'marine', 'marine_npe_report'));
  if (!await npeDir.exists()) {
    await npeDir.create(recursive: true);
  }
  return npeDir;
}
|
|
|
|
/// Saves one marine NPE report (JSON plus its images) under
/// `<base>/marine/marine_npe_report/<station>_<date>_<time>_NPE/`.
///
/// Images returned by [MarineManualNpeReportData.toApiImageFiles] are
/// copied into the event folder and their local paths recorded in the
/// JSON. A per-image failure is logged and that entry left unset.
///
/// Returns the event directory path, or null on permission/IO failure.
Future<String?> saveNpeReportData(MarineManualNpeReportData data, {required String serverName}) async {
  final baseDir = await _getNpeBaseDir(serverName: serverName);
  if (baseDir == null) {
    debugPrint("Could not get public storage directory for NPE. Check permissions.");
    return null;
  }

  try {
    // NPE reports can reference either a manual or a tarball station, or a
    // free-form location when neither code is present.
    final stationCode = data.selectedStation?['man_station_code'] ?? data.selectedStation?['tbl_station_code'] ?? 'CUSTOM_LOC';
    final timestamp = "${data.eventDate}_${data.eventTime?.replaceAll(':', '-')}";
    final eventFolderName = "${stationCode}_${timestamp}_NPE";
    final eventDir = Directory(p.join(baseDir.path, eventFolderName));

    if (!await eventDir.exists()) {
      await eventDir.create(recursive: true);
    }

    final Map<String, dynamic> jsonData = data.toDbJson();
    jsonData['serverConfigName'] = serverName;

    final imageFiles = data.toApiImageFiles();
    for (var entry in imageFiles.entries) {
      final File? imageFile = entry.value;
      if (imageFile != null && imageFile.path.isNotEmpty) {
        try {
          // FIX: every sibling save method guards against the image already
          // living in the event folder (e.g. on re-save); without the guard
          // the file would be copied onto itself. Added for consistency.
          if (p.dirname(imageFile.path) == eventDir.path) {
            jsonData[entry.key] = imageFile.path;
          } else {
            final String originalFileName = p.basename(imageFile.path);
            final File newFile = await imageFile.copy(p.join(eventDir.path, originalFileName));
            jsonData[entry.key] = newFile.path;
          }
        } catch (e) {
          debugPrint("Error processing NPE image file ${imageFile.path}: $e");
        }
      }
    }

    final jsonFile = File(p.join(eventDir.path, 'data.json'));
    await jsonFile.writeAsString(jsonEncode(jsonData));
    debugPrint("NPE Report log saved to: ${jsonFile.path}");

    return eventDir.path;

  } catch (e) {
    debugPrint("Error saving NPE report to local storage: $e");
    return null;
  }
}
|
|
|
|
/// Collects every saved NPE report log across all server folders.
///
/// Scans `<MMSV4>/<server>/marine/marine_npe_report/<event>/data.json`
/// and tags each decoded map with a runtime-only `logDirectory` key.
/// Returns an empty list when shared storage is unavailable.
Future<List<Map<String, dynamic>>> getAllNpeLogs() async {
  final mmsv4Root = await _getPublicMMSV4Directory(serverName: '');
  if (mmsv4Root == null || !await mmsv4Root.exists()) return [];

  final List<Map<String, dynamic>> allLogs = [];
  final serverDirs = mmsv4Root.listSync().whereType<Directory>();

  for (var serverDir in serverDirs) {
    final baseDir = Directory(p.join(serverDir.path, 'marine', 'marine_npe_report'));
    if (!await baseDir.exists()) continue;
    try {
      for (var entity in baseDir.listSync().whereType<Directory>()) {
        final jsonFile = File(p.join(entity.path, 'data.json'));
        if (!await jsonFile.exists()) continue;
        // FIX: per-entry try/catch so one corrupt data.json no longer
        // aborts the scan of the remaining events for this server.
        try {
          final content = await jsonFile.readAsString();
          final data = jsonDecode(content) as Map<String, dynamic>;
          data['logDirectory'] = entity.path;
          allLogs.add(data);
        } catch (e) {
          debugPrint("Skipping corrupt NPE log ${jsonFile.path}: $e");
        }
      }
    } catch (e) {
      debugPrint("Error reading NPE logs from ${baseDir.path}: $e");
    }
  }
  return allLogs;
}
|
|
|
|
/// Persists an edited NPE report log back to its `data.json` on disk.
///
/// [updatedLogData] must contain the `logDirectory` key injected by
/// [getAllNpeLogs]; without it the call is a no-op. The transient UI flag
/// `isResubmitting` is never persisted.
Future<void> updateNpeLog(Map<String, dynamic> updatedLogData) async {
  final logDir = updatedLogData['logDirectory'];
  if (logDir == null) {
    // FIX: log the failure like the sibling update methods do (the original
    // returned silently here).
    debugPrint("Cannot update log: logDirectory key is missing.");
    return;
  }

  try {
    final jsonFile = File(p.join(logDir, 'data.json'));
    if (await jsonFile.exists()) {
      // FIX: write a defensive copy so the caller's map is not mutated as
      // a side effect (the original removed 'isResubmitting' in place).
      final persistable = Map<String, dynamic>.from(updatedLogData)
        ..remove('isResubmitting');
      await jsonFile.writeAsString(jsonEncode(persistable));
      debugPrint("NPE Log updated successfully at: ${jsonFile.path}");
    }
  } catch (e) {
    debugPrint("Error updating NPE log: $e");
  }
}
|
|
|
|
// =======================================================================
|
|
// Part 5: River In-Situ Specific Methods (LOGGING RESTORED)
|
|
// =======================================================================
|
|
|
|
/// Resolves (and lazily creates) the river in-situ base folder for
/// [samplingType]: `<MMSV4>/<serverName>/river/river_in_situ_sampling/<type>`.
///
/// 'Schedule' and 'Triennial' runs get their own subfolder; any other value
/// (including null) is grouped under 'Others'. Returns null when the public
/// MMSV4 root is unavailable.
Future<Directory?> getRiverInSituBaseDir(String? samplingType, {required String serverName}) async {
  final root = await _getPublicMMSV4Directory(serverName: serverName);
  if (root == null) return null;

  final subfolderName =
      (samplingType == 'Schedule' || samplingType == 'Triennial') ? samplingType! : 'Others';

  final inSituDir = Directory(p.join(root.path, 'river', 'river_in_situ_sampling', subfolderName));
  if (!await inSituDir.exists()) {
    await inSituDir.create(recursive: true);
  }
  return inSituDir;
}
|
|
|
|
/// Saves one river in-situ sampling event (JSON plus its images) under
/// `<base>/river/river_in_situ_sampling/<type>/<station>_<date>_<time>/`.
///
/// Images returned by [RiverInSituSamplingData.toApiImageFiles] are copied
/// into the event folder and their local paths recorded in the JSON; a
/// file already inside the folder is referenced in place.
/// NOTE(review): unlike the marine save methods there is no per-image
/// try/catch here, so a single failed copy aborts the whole save via the
/// outer catch — confirm whether that is intended.
///
/// Returns the event directory path, or null on permission/IO failure.
Future<String?> saveRiverInSituSamplingData(RiverInSituSamplingData data, {required String serverName}) async {
  final baseDir = await getRiverInSituBaseDir(data.samplingType, serverName: serverName);
  if (baseDir == null) {
    debugPrint("Could not get public storage directory for River In-Situ. Check permissions.");
    return null;
  }

  try {
    // Folder name is '<stationCode>_<date>_<time>'; ':' in the time is
    // replaced so the name stays filesystem-safe.
    final stationCode = data.selectedStation?['sampling_station_code'] ?? 'UNKNOWN_STATION';
    final timestamp = "${data.samplingDate}_${data.samplingTime?.replaceAll(':', '-')}";
    final eventFolderName = "${stationCode}_$timestamp";
    final eventDir = Directory(p.join(baseDir.path, eventFolderName));

    if (!await eventDir.exists()) {
      await eventDir.create(recursive: true);
    }

    final Map<String, dynamic> jsonData = data.toMap();
    jsonData['serverConfigName'] = serverName;

    final imageFiles = data.toApiImageFiles();
    for (var entry in imageFiles.entries) {
      final File? imageFile = entry.value;
      if (imageFile != null) {
        final String originalFileName = p.basename(imageFile.path);
        // Already in the event folder (e.g. on re-save): reference in place.
        if (p.dirname(imageFile.path) == eventDir.path) {
          jsonData[entry.key] = imageFile.path;
        } else {
          final File newFile = await imageFile.copy(p.join(eventDir.path, originalFileName));
          jsonData[entry.key] = newFile.path;
        }
      }
    }

    final jsonFile = File(p.join(eventDir.path, 'data.json'));
    await jsonFile.writeAsString(jsonEncode(jsonData));
    debugPrint("River In-Situ log saved to: ${jsonFile.path}");

    return eventDir.path;

  } catch (e) {
    debugPrint("Error saving River In-Situ log to local storage: $e");
    return null;
  }
}
|
|
|
|
/// Collects every saved river in-situ log across all server folders.
///
/// Layout: `<MMSV4>/<server>/river/river_in_situ_sampling/
/// <Schedule|Triennial|Others>/<event>/data.json`. Each decoded map is
/// tagged with a runtime-only `logDirectory` key. Returns an empty list
/// when shared storage is unavailable.
Future<List<Map<String, dynamic>>> getAllRiverInSituLogs() async {
  final mmsv4Root = await _getPublicMMSV4Directory(serverName: '');
  if (mmsv4Root == null || !await mmsv4Root.exists()) return [];

  final List<Map<String, dynamic>> allLogs = [];
  final serverDirs = mmsv4Root.listSync().whereType<Directory>();

  for (var serverDir in serverDirs) {
    final topLevelDir = Directory(p.join(serverDir.path, 'river', 'river_in_situ_sampling'));
    if (!await topLevelDir.exists()) continue;
    try {
      for (var typeSubfolder in topLevelDir.listSync().whereType<Directory>()) {
        for (var eventFolder in typeSubfolder.listSync().whereType<Directory>()) {
          final jsonFile = File(p.join(eventFolder.path, 'data.json'));
          if (!await jsonFile.exists()) continue;
          // FIX: per-event try/catch so one corrupt data.json no longer
          // aborts the scan of the remaining events for this server.
          try {
            final content = await jsonFile.readAsString();
            final data = jsonDecode(content) as Map<String, dynamic>;
            data['logDirectory'] = eventFolder.path;
            allLogs.add(data);
          } catch (e) {
            debugPrint("Skipping corrupt river in-situ log ${jsonFile.path}: $e");
          }
        }
      }
    } catch (e) {
      debugPrint("Error getting all river in-situ logs from ${topLevelDir.path}: $e");
    }
  }
  return allLogs;
}
|
|
|
|
/// Persists an edited river in-situ log back to its `data.json` on disk.
///
/// [updatedLogData] must contain the `logDirectory` key injected by
/// [getAllRiverInSituLogs]; without it the call is a no-op. The transient
/// UI flag `isResubmitting` is never persisted.
Future<void> updateRiverInSituLog(Map<String, dynamic> updatedLogData) async {
  final logDir = updatedLogData['logDirectory'];
  if (logDir == null) {
    debugPrint("Cannot update log: logDirectory key is missing.");
    return;
  }

  try {
    final jsonFile = File(p.join(logDir, 'data.json'));
    if (await jsonFile.exists()) {
      // FIX: write a defensive copy so the caller's map is not mutated as
      // a side effect (the original removed 'isResubmitting' in place).
      final persistable = Map<String, dynamic>.from(updatedLogData)
        ..remove('isResubmitting');
      await jsonFile.writeAsString(jsonEncode(persistable));
      debugPrint("Log updated successfully at: ${jsonFile.path}");
    }
  } catch (e) {
    debugPrint("Error updating river in-situ log: $e");
  }
}
|
|
|
|
// =======================================================================
|
|
// Part 6: River Triennial Specific Methods
|
|
// =======================================================================
|
|
|
|
/// Resolves (and lazily creates) the river triennial sampling base folder,
/// `<MMSV4>/<serverName>/river/river_triennial_sampling`.
///
/// Returns null when the public MMSV4 root is unavailable.
Future<Directory?> _getRiverTriennialBaseDir({required String serverName}) async {
  final root = await _getPublicMMSV4Directory(serverName: serverName);
  if (root == null) return null;

  final triennialDir = Directory(p.join(root.path, 'river', 'river_triennial_sampling'));
  if (!await triennialDir.exists()) {
    await triennialDir.create(recursive: true);
  }
  return triennialDir;
}
|
|
|
|
/// Saves one river triennial sampling event (JSON plus its images) under
/// `<base>/river/river_triennial_sampling/<station>_<date>_<time>/`.
///
/// Images returned by [RiverManualTriennialSamplingData.toApiImageFiles]
/// are copied into the event folder and their local paths recorded in the
/// JSON; a file already inside the folder is referenced in place.
/// NOTE(review): unlike the marine save methods there is no per-image
/// try/catch here, so a single failed copy aborts the whole save via the
/// outer catch — confirm whether that is intended.
///
/// Returns the event directory path, or null on permission/IO failure.
Future<String?> saveRiverManualTriennialSamplingData(RiverManualTriennialSamplingData data, {required String serverName}) async {
  final baseDir = await _getRiverTriennialBaseDir(serverName: serverName);
  if (baseDir == null) {
    debugPrint("Could not get public storage directory for River Triennial. Check permissions.");
    return null;
  }

  try {
    // Folder name is '<stationCode>_<date>_<time>'; ':' in the time is
    // replaced so the name stays filesystem-safe.
    final stationCode = data.selectedStation?['sampling_station_code'] ?? 'UNKNOWN_STATION';
    final timestamp = "${data.samplingDate}_${data.samplingTime?.replaceAll(':', '-')}";
    final eventFolderName = "${stationCode}_$timestamp";
    final eventDir = Directory(p.join(baseDir.path, eventFolderName));

    if (!await eventDir.exists()) {
      await eventDir.create(recursive: true);
    }

    final Map<String, dynamic> jsonData = data.toMap();
    jsonData['serverConfigName'] = serverName;

    final imageFiles = data.toApiImageFiles();
    for (var entry in imageFiles.entries) {
      final File? imageFile = entry.value;
      if (imageFile != null) {
        final String originalFileName = p.basename(imageFile.path);
        // Already in the event folder (e.g. on re-save): reference in place.
        if (p.dirname(imageFile.path) == eventDir.path) {
          jsonData[entry.key] = imageFile.path;
        } else {
          final File newFile = await imageFile.copy(p.join(eventDir.path, originalFileName));
          jsonData[entry.key] = newFile.path;
        }
      }
    }

    final jsonFile = File(p.join(eventDir.path, 'data.json'));
    await jsonFile.writeAsString(jsonEncode(jsonData));
    debugPrint("River Triennial log saved to: ${jsonFile.path}");

    return eventDir.path;

  } catch (e) {
    debugPrint("Error saving River Triennial log to local storage: $e");
    return null;
  }
}
|
|
|
|
/// Collects every saved river triennial log across all server folders.
///
/// Scans `<MMSV4>/<server>/river/river_triennial_sampling/<event>/data.json`
/// and tags each decoded map with a runtime-only `logDirectory` key.
/// Returns an empty list when shared storage is unavailable.
Future<List<Map<String, dynamic>>> getAllRiverManualTriennialLogs() async {
  final mmsv4Root = await _getPublicMMSV4Directory(serverName: '');
  if (mmsv4Root == null || !await mmsv4Root.exists()) return [];

  final List<Map<String, dynamic>> allLogs = [];
  final serverDirs = mmsv4Root.listSync().whereType<Directory>();

  for (var serverDir in serverDirs) {
    final baseDir = Directory(p.join(serverDir.path, 'river', 'river_triennial_sampling'));
    if (!await baseDir.exists()) continue;
    try {
      for (var entity in baseDir.listSync().whereType<Directory>()) {
        final jsonFile = File(p.join(entity.path, 'data.json'));
        if (!await jsonFile.exists()) continue;
        // FIX: per-entry try/catch so one corrupt data.json no longer
        // aborts the scan of the remaining events for this server.
        try {
          final content = await jsonFile.readAsString();
          final data = jsonDecode(content) as Map<String, dynamic>;
          data['logDirectory'] = entity.path;
          allLogs.add(data);
        } catch (e) {
          debugPrint("Skipping corrupt triennial log ${jsonFile.path}: $e");
        }
      }
    } catch (e) {
      debugPrint("Error reading triennial logs from ${baseDir.path}: $e");
    }
  }
  return allLogs;
}
|
|
|
|
/// Persists an edited river triennial log back to its `data.json` on disk.
///
/// [updatedLogData] must contain the `logDirectory` key injected by
/// [getAllRiverManualTriennialLogs]; without it the call is a no-op. The
/// transient UI flag `isResubmitting` is never persisted.
Future<void> updateRiverManualTriennialLog(Map<String, dynamic> updatedLogData) async {
  final logDir = updatedLogData['logDirectory'];
  if (logDir == null) {
    debugPrint("Cannot update log: logDirectory key is missing.");
    return;
  }

  try {
    final jsonFile = File(p.join(logDir, 'data.json'));
    if (await jsonFile.exists()) {
      // FIX: write a defensive copy so the caller's map is not mutated as
      // a side effect (the original removed 'isResubmitting' in place).
      final persistable = Map<String, dynamic>.from(updatedLogData)
        ..remove('isResubmitting');
      await jsonFile.writeAsString(jsonEncode(persistable));
      debugPrint("Log updated successfully at: ${jsonFile.path}");
    }
  } catch (e) {
    debugPrint("Error updating river triennial log: $e");
  }
}
|
|
|
|
|
|
// =======================================================================
|
|
// --- ADDED: Part 7: Info Centre Document Management ---
|
|
// =======================================================================
|
|
|
|
final Dio _dio = Dio();
|
|
|
|
/// Resolves (and lazily creates) the shared `info_centre_documents` folder
/// directly under the MMSV4 root (serverName '' resolves the root itself).
///
/// Returns null when the public MMSV4 root is unavailable.
Future<Directory?> _getInfoCentreDocumentsDirectory() async {
  final root = await _getPublicMMSV4Directory(serverName: '');
  if (root == null) return null;

  final docDir = Directory(p.join(root.path, 'info_centre_documents'));
  if (!await docDir.exists()) {
    await docDir.create(recursive: true);
  }
  return docDir;
}
|
|
|
|
/// Maps a remote document URL to its local cache path.
///
/// Uses the URL's basename as the file name, so two URLs sharing a
/// basename collide on the same local file. NOTE(review): a URL with a
/// query string keeps its '?...' suffix in the file name — confirm callers
/// always pass clean document URLs.
///
/// Returns null when local storage is unavailable.
Future<String?> getLocalDocumentPath(String docUrl) async {
  final docDir = await _getInfoCentreDocumentsDirectory();
  if (docDir == null) return null;

  final fileName = p.basename(docUrl);
  return p.join(docDir.path, fileName);
}
|
|
|
|
/// Whether the document behind [docUrl] already exists in the local cache.
Future<bool> isDocumentDownloaded(String docUrl) async {
  final filePath = await getLocalDocumentPath(docUrl);
  return filePath != null && await File(filePath).exists();
}
|
|
|
|
/// Downloads [docUrl] into the Info Centre documents folder, reporting
/// progress as a 0..1 fraction via [onReceiveProgress].
///
/// Throws an [Exception] when the local path cannot be resolved (e.g.
/// permission denied) or when the transfer fails; a partially written file
/// is deleted before the failure is surfaced.
Future<void> downloadDocument({
  required String docUrl,
  required Function(double) onReceiveProgress,
}) async {
  final filePath = await getLocalDocumentPath(docUrl);
  if (filePath == null) {
    throw Exception("Could not get local storage path. Check permissions.");
  }

  try {
    await _dio.download(
      docUrl,
      filePath,
      onReceiveProgress: (received, total) {
        // Dio reports total == -1 when the server sends no content length;
        // no fraction can be computed then, so stay silent.
        if (total != -1) {
          onReceiveProgress(received / total);
        }
      },
    );
  } catch (e) {
    // Remove the partial download so isDocumentDownloaded() does not later
    // treat it as complete.
    final partial = File(filePath);
    if (await partial.exists()) {
      await partial.delete();
    }
    throw Exception("Download failed: $e");
  }
}
|
|
} |