2021-04-10 06:28:12 +02:00
|
|
|
import 'dart:convert';
|
|
|
|
import 'dart:typed_data';
|
|
|
|
|
2021-04-15 20:44:25 +02:00
|
|
|
import 'package:equatable/equatable.dart';
|
2021-04-10 06:28:12 +02:00
|
|
|
import 'package:flutter/foundation.dart';
|
|
|
|
import 'package:idb_sqflite/idb_sqflite.dart';
|
|
|
|
import 'package:logging/logging.dart';
|
|
|
|
import 'package:nc_photos/account.dart';
|
|
|
|
import 'package:nc_photos/api/api.dart';
|
|
|
|
import 'package:nc_photos/app_db.dart';
|
|
|
|
import 'package:nc_photos/entity/exif.dart';
|
|
|
|
import 'package:nc_photos/entity/webdav_response_parser.dart';
|
|
|
|
import 'package:nc_photos/exception.dart';
|
2021-04-26 12:59:08 +02:00
|
|
|
import 'package:nc_photos/int_util.dart' as int_util;
|
2021-04-10 06:28:12 +02:00
|
|
|
import 'package:nc_photos/iterable_extension.dart';
|
|
|
|
import 'package:nc_photos/string_extension.dart';
|
|
|
|
import 'package:path/path.dart' as path;
|
2021-04-26 12:59:08 +02:00
|
|
|
import 'package:quiver/iterables.dart';
|
2021-04-10 06:28:12 +02:00
|
|
|
import 'package:xml/xml.dart';
|
|
|
|
|
|
|
|
/// Compare two [File]s by date in descending order.
///
/// The date is the EXIF DateTimeOriginal when the file carries metadata,
/// falling back to the file's last modified time. Files sharing the same
/// date are ordered by their paths in ascending order to keep the result
/// deterministic.
int compareFileDateTimeDescending(File x, File y) {
  DateTime dateOf(File f) =>
      f.metadata?.exif?.dateTimeOriginal ?? f.lastModified;
  final byDate = dateOf(y).compareTo(dateOf(x));
  // fall back to path comparison when both files carry the same date
  return byDate != 0 ? byDate : x.path.compareTo(y.path);
}
|
|
|
|
|
|
|
|
/// Immutable object that hold metadata of a [File]
class Metadata with EquatableMixin {
  Metadata({
    // defaults to now; always normalized to UTC before storing
    DateTime lastUpdated,
    this.fileEtag,
    this.imageWidth,
    this.imageHeight,
    this.exif,
  }) : this.lastUpdated = (lastUpdated ?? DateTime.now()).toUtc();

  /// Parse Metadata from [json]
  ///
  /// If the version saved in json does not match the active one, the
  /// corresponding upgrader will be called one by one to upgrade the json,
  /// version by version until it reached the active version. If any upgrader
  /// in the chain is null, the upgrade process will fail
  ///
  /// Returns null when the json could not be upgraded to the active version
  factory Metadata.fromJson(
    Map<String, dynamic> json, {
    MetadataUpgraderV1 upgraderV1,
    MetadataUpgraderV2 upgraderV2,
  }) {
    final jsonVersion = json["version"];
    // upgrade chain: each step rewrites [json] in place for the next step
    if (jsonVersion < 2) {
      json = upgraderV1?.call(json);
      if (json == null) {
        // upgrader missing or deliberately dropped this metadata
        _log.info("[fromJson] Version $jsonVersion not compatible");
        return null;
      }
    }
    if (jsonVersion < 3) {
      json = upgraderV2?.call(json);
      if (json == null) {
        _log.info("[fromJson] Version $jsonVersion not compatible");
        return null;
      }
    }
    return Metadata(
      lastUpdated: json["lastUpdated"] == null
          ? null
          : DateTime.parse(json["lastUpdated"]),
      fileEtag: json["fileEtag"],
      imageWidth: json["imageWidth"],
      imageHeight: json["imageHeight"],
      exif: json["exif"] == null
          ? null
          : Exif.fromJson(json["exif"].cast<String, dynamic>()),
    );
  }

  /// Serialize to a json map, omitting null fields
  ///
  /// The active [version] is always written so [fromJson] can upgrade later
  Map<String, dynamic> toJson() {
    return {
      "version": version,
      "lastUpdated": lastUpdated.toIso8601String(),
      if (fileEtag != null) "fileEtag": fileEtag,
      if (imageWidth != null) "imageWidth": imageWidth,
      if (imageHeight != null) "imageHeight": imageHeight,
      if (exif != null) "exif": exif.toJson(),
    };
  }

  @override
  toString() {
    var product = "$runtimeType {"
        "lastUpdated: $lastUpdated, ";
    if (fileEtag != null) {
      product += "fileEtag: $fileEtag, ";
    }
    if (imageWidth != null) {
      product += "imageWidth: $imageWidth, ";
    }
    if (imageHeight != null) {
      product += "imageHeight: $imageHeight, ";
    }
    if (exif != null) {
      product += "exif: $exif, ";
    }
    return product + "}";
  }

  @override
  get props => [
        lastUpdated,
        fileEtag,
        imageWidth,
        imageHeight,
        exif,
      ];

  // when this metadata was generated, in UTC (see constructor)
  final DateTime lastUpdated;

  /// Etag of the parent file when the metadata is saved
  final String fileEtag;
  final int imageWidth;
  final int imageHeight;
  final Exif exif;

  /// versioning of this class, use to upgrade old persisted metadata
  static const version = 3;

  static final _log = Logger("entity.file.Metadata");
}
|
|
|
|
|
|
|
|
/// Function-like object that upgrades a serialized [Metadata] map from one
/// version to the next
abstract class MetadataUpgrader {
  /// Return the upgraded [json], or null when the metadata cannot be
  /// upgraded and should be discarded (see [Metadata.fromJson])
  Map<String, dynamic> call(Map<String, dynamic> json);
}
|
|
|
|
|
|
|
|
/// Upgrade v1 Metadata to v2
class MetadataUpgraderV1 implements MetadataUpgrader {
  MetadataUpgraderV1({
    @required this.fileContentType,
    this.logFilePath,
  });

  /// Return [json] unchanged for most files; return null for webp files so
  /// their bugged v1 metadata is discarded
  Map<String, dynamic> call(Map<String, dynamic> json) {
    if (fileContentType != "image/webp") {
      // nothing to fix for other content types
      return json;
    }
    // Version 1 metadata for webp is bugged, drop it
    _log.fine("[call] Upgrade v1 metadata for file: $logFilePath");
    return null;
  }

  final String fileContentType;

  /// File path for logging only
  final String logFilePath;

  static final _log = Logger("entity.file.MetadataUpgraderV1");
}
|
|
|
|
|
|
|
|
/// Upgrade v2 Metadata to v3
class MetadataUpgraderV2 implements MetadataUpgrader {
  MetadataUpgraderV2({
    @required this.fileContentType,
    this.logFilePath,
  });

  /// For jpeg files whose EXIF orientation rotates the image sideways,
  /// swap the stored width and height; otherwise return [json] untouched
  Map<String, dynamic> call(Map<String, dynamic> json) {
    if (fileContentType == "image/jpeg") {
      // Version 2 metadata for jpeg doesn't consider orientation
      final exif = json["exif"];
      if (exif != null && exif.containsKey("Orientation")) {
        // Check orientation
        final orientation = exif["Orientation"];
        // EXIF orientations 5-8 transpose the image
        final isSideways = orientation >= 5 && orientation <= 8;
        if (isSideways) {
          _log.fine("[call] Upgrade v2 metadata for file: $logFilePath");
          final w = json["imageWidth"];
          json["imageWidth"] = json["imageHeight"];
          json["imageHeight"] = w;
        }
      }
    }
    return json;
  }

  final String fileContentType;

  /// File path for logging only
  final String logFilePath;

  static final _log = Logger("entity.file.MetadataUpgraderV2");
}
|
|
|
|
|
2021-04-15 20:44:25 +02:00
|
|
|
/// A remote file or directory, identified by its WebDAV [path]
class File with EquatableMixin {
  File({
    @required String path,
    this.contentLength,
    this.contentType,
    this.etag,
    this.lastModified,
    this.isCollection,
    this.usedBytes,
    this.hasPreview,
    this.metadata,
  }) : this.path = path.trimRightAny("/");
  // ^ trailing slashes are stripped so paths compare consistently

  /// Parse a File from [json]
  ///
  /// The nested metadata is parsed through [Metadata.fromJson] with both
  /// upgraders supplied, so outdated persisted metadata is upgraded (or
  /// dropped, yielding a null [metadata]) transparently
  factory File.fromJson(Map<String, dynamic> json) {
    return File(
      path: json["path"],
      contentLength: json["contentLength"],
      contentType: json["contentType"],
      etag: json["etag"],
      lastModified: json["lastModified"] == null
          ? null
          : DateTime.parse(json["lastModified"]),
      isCollection: json["isCollection"],
      usedBytes: json["usedBytes"],
      hasPreview: json["hasPreview"],
      metadata: json["metadata"] == null
          ? null
          : Metadata.fromJson(
              json["metadata"].cast<String, dynamic>(),
              upgraderV1: MetadataUpgraderV1(
                fileContentType: json["contentType"],
                logFilePath: json["path"],
              ),
              upgraderV2: MetadataUpgraderV2(
                fileContentType: json["contentType"],
                logFilePath: json["path"],
              ),
            ),
    );
  }

  @override
  toString() {
    var product = "$runtimeType {"
        "path: '$path', ";
    if (contentLength != null) {
      product += "contentLength: $contentLength, ";
    }
    if (contentType != null) {
      product += "contentType: '$contentType', ";
    }
    if (etag != null) {
      product += "etag: '$etag', ";
    }
    if (lastModified != null) {
      product += "lastModified: $lastModified, ";
    }
    if (isCollection != null) {
      product += "isCollection: $isCollection, ";
    }
    if (usedBytes != null) {
      product += "usedBytes: $usedBytes, ";
    }
    if (hasPreview != null) {
      product += "hasPreview: $hasPreview, ";
    }
    if (metadata != null) {
      product += "metadata: $metadata, ";
    }
    return product + "}";
  }

  /// Serialize to a json map, omitting null fields
  Map<String, dynamic> toJson() {
    return {
      "path": path,
      if (contentLength != null) "contentLength": contentLength,
      if (contentType != null) "contentType": contentType,
      if (etag != null) "etag": etag,
      if (lastModified != null) "lastModified": lastModified.toIso8601String(),
      if (isCollection != null) "isCollection": isCollection,
      if (usedBytes != null) "usedBytes": usedBytes,
      if (hasPreview != null) "hasPreview": hasPreview,
      if (metadata != null) "metadata": metadata.toJson(),
    };
  }

  /// Return a copy with the given fields replaced
  ///
  /// A null argument keeps the current value, so this cannot clear a field;
  /// use [withoutMetadata] to produce a copy without metadata
  File copyWith({
    String path,
    int contentLength,
    String contentType,
    String etag,
    DateTime lastModified,
    bool isCollection,
    int usedBytes,
    bool hasPreview,
    Metadata metadata,
  }) {
    return File(
      path: path ?? this.path,
      contentLength: contentLength ?? this.contentLength,
      contentType: contentType ?? this.contentType,
      etag: etag ?? this.etag,
      lastModified: lastModified ?? this.lastModified,
      isCollection: isCollection ?? this.isCollection,
      usedBytes: usedBytes ?? this.usedBytes,
      hasPreview: hasPreview ?? this.hasPreview,
      metadata: metadata ?? this.metadata,
    );
  }

  /// Return a copy of this file with [metadata] removed
  File withoutMetadata() {
    return File(
      path: path,
      contentLength: contentLength,
      contentType: contentType,
      etag: etag,
      lastModified: lastModified,
      isCollection: isCollection,
      usedBytes: usedBytes,
      hasPreview: hasPreview,
    );
  }

  /// Return the path of this file with the DAV part stripped
  String get strippedPath {
    // WebDAV path: remote.php/dav/files/{username}/{path}
    if (path.contains("remote.php/dav/files")) {
      // skip past the username segment that follows the DAV prefix
      return path
          .substring(path.indexOf("/", "remote.php/dav/files/".length) + 1);
    } else {
      return path;
    }
  }

  @override
  get props => [
        path,
        contentLength,
        contentType,
        etag,
        lastModified,
        isCollection,
        usedBytes,
        hasPreview,
        metadata,
      ];

  final String path;
  final int contentLength;
  final String contentType;
  final String etag;
  final DateTime lastModified;
  final bool isCollection;
  final int usedBytes;
  final bool hasPreview;
  // metadata
  final Metadata metadata;
}
|
|
|
|
|
|
|
|
/// Repository that forwards every file operation to its [dataSrc]
class FileRepo {
  FileRepo(this.dataSrc);

  /// See [FileDataSource.list]
  Future<List<File>> list(Account account, File root) =>
      dataSrc.list(account, root);

  /// See [FileDataSource.remove]
  Future<void> remove(Account account, File file) =>
      dataSrc.remove(account, file);

  /// See [FileDataSource.getBinary]
  Future<Uint8List> getBinary(Account account, File file) =>
      dataSrc.getBinary(account, file);

  /// See [FileDataSource.putBinary]
  Future<void> putBinary(Account account, String path, Uint8List content) =>
      dataSrc.putBinary(account, path, content);

  /// See [FileDataSource.updateMetadata]
  Future<void> updateMetadata(Account account, File file, Metadata metadata) =>
      dataSrc.updateMetadata(account, file, metadata);

  final FileDataSource dataSrc;
}
|
|
|
|
|
|
|
|
/// Interface of a backend that can list, read, write and annotate [File]s
abstract class FileDataSource {
  /// List all files under [f]
  Future<List<File>> list(Account account, File f);

  /// Remove file
  Future<void> remove(Account account, File f);

  /// Read file as binary array
  Future<Uint8List> getBinary(Account account, File f);

  /// Upload content to [path]
  Future<void> putBinary(Account account, String path, Uint8List content);

  /// Update metadata for a file
  ///
  /// This will completely replace the metadata of the file [f]. Partial update
  /// is not supported
  Future<void> updateMetadata(Account account, File f, Metadata metadata);
}
|
|
|
|
|
|
|
|
/// [FileDataSource] backed by a remote WebDAV server
class FileWebdavDataSource implements FileDataSource {
  /// List files under [f] via PROPFIND
  ///
  /// [depth], when given, limits how deep the server recurses (0 queries the
  /// dir itself only). Files whose cached app metadata has an etag different
  /// from the file's current etag are returned with the metadata stripped
  @override
  list(
    Account account,
    File f, {
    int depth,
  }) async {
    _log.fine("[list] ${f.path}");
    final response = await Api(account).files().propfind(
      path: f.path,
      depth: depth,
      getlastmodified: 1,
      resourcetype: 1,
      getetag: 1,
      getcontenttype: 1,
      getcontentlength: 1,
      hasPreview: 1,
      // custom app namespace carrying the serialized Metadata property
      customNamespaces: {
        "com.nkming.nc_photos": "app",
      },
      customProperties: [
        "app:metadata",
      ],
    );
    if (!response.isGood) {
      _log.severe("[list] Failed requesting server: $response");
      throw ApiException(
          response: response,
          message: "Failed communicating with server: ${response.statusCode}");
    }

    final xml = XmlDocument.parse(response.body);
    final files = WebdavFileParser()(xml);
    // _log.fine("[list] Parsed files: [$files]");
    return files.map((e) {
      if (e.metadata == null || e.metadata.fileEtag == e.etag) {
        return e;
      } else {
        // the file changed since the metadata was generated; drop it
        _log.info("[list] Ignore outdated metadata for ${e.path}");
        return e.withoutMetadata();
      }
    }).toList();
  }

  /// Delete [f] on the server
  @override
  remove(Account account, File f) async {
    _log.info("[remove] ${f.path}");
    final response = await Api(account).files().delete(path: f.path);
    if (!response.isGood) {
      _log.severe("[remove] Failed requesting server: $response");
      throw ApiException(
          response: response,
          message: "Failed communicating with server: ${response.statusCode}");
    }
  }

  /// Download the content of [f]
  @override
  getBinary(Account account, File f) async {
    _log.info("[getBinary] ${f.path}");
    final response = await Api(account).files().get(path: f.path);
    if (!response.isGood) {
      _log.severe("[getBinary] Failed requesting server: $response");
      throw ApiException(
          response: response,
          message: "Failed communicating with server: ${response.statusCode}");
    }
    return response.body;
  }

  /// Upload [content] to [path]
  @override
  putBinary(Account account, String path, Uint8List content) async {
    _log.info("[putBinary] $path");
    final response =
        await Api(account).files().put(path: path, content: content);
    if (!response.isGood) {
      _log.severe("[putBinary] Failed requesting server: $response");
      throw ApiException(
          response: response,
          message: "Failed communicating with server: ${response.statusCode}");
    }
  }

  /// Store [metadata] as a custom WebDAV property of [f] via PROPPATCH
  ///
  /// A null [metadata] removes the property from the server instead
  @override
  updateMetadata(Account account, File f, Metadata metadata) async {
    _log.info("[updateMetadata] ${f.path}");
    if (metadata != null && metadata.fileEtag != f.etag) {
      // not fatal, but the metadata will likely be ignored on next list
      _log.warning(
          "[updateMetadata] etag mismatch (metadata: ${metadata.fileEtag}, file: ${f.etag})");
    }
    final setProps = {
      if (metadata != null) "app:metadata": jsonEncode(metadata.toJson()),
    };
    final removeProps = [
      if (metadata == null) "app:metadata",
    ];
    final response = await Api(account).files().proppatch(
          path: f.path,
          namespaces: {
            "com.nkming.nc_photos": "app",
          },
          set: setProps.isNotEmpty ? setProps : null,
          remove: removeProps.isNotEmpty ? removeProps : null,
        );
    if (!response.isGood) {
      _log.severe("[updateMetadata] Failed requesting server: $response");
      throw ApiException(
          response: response,
          message: "Failed communicating with server: ${response.statusCode}");
    }
  }

  static final _log = Logger("entity.file.FileWebdavDataSource");
}
|
|
|
|
|
|
|
|
/// [FileDataSource] backed by the local [AppDb] object store
///
/// Dir listings are persisted as [AppDbFileEntry] chunks keyed by
/// (dir path, chunk index); see [_cacheListResults]
class FileAppDbDataSource implements FileDataSource {
  /// Return the cached listing of [f]
  ///
  /// Throws [CacheNotFoundException] when there is no cached entry
  @override
  list(Account account, File f) {
    _log.info("[list] ${f.path}");
    return AppDb.use((db) async {
      final transaction = db.transaction(AppDb.fileStoreName, idbModeReadOnly);
      final store = transaction.objectStore(AppDb.fileStoreName);
      return await _doList(store, account, f);
    });
  }

  /// Remove every cached chunk belonging to [f]
  @override
  remove(Account account, File f) {
    _log.info("[remove] ${f.path}");
    return AppDb.use((db) async {
      final transaction = db.transaction(AppDb.fileStoreName, idbModeReadWrite);
      final store = transaction.objectStore(AppDb.fileStoreName);
      final index = store.index(AppDbFileEntry.indexName);
      final path = AppDbFileEntry.toPath(account, f);
      // match every chunk index for this path
      final range = KeyRange.bound([path, 0], [path, int_util.int32Max]);
      // collect the primary keys first, then delete one by one (delete with
      // a KeyRange is not supported, see _cleanUpCachedDir)
      final keys = await index
          .openKeyCursor(range: range, autoAdvance: true)
          .map((cursor) => cursor.primaryKey)
          .toList();
      for (final k in keys) {
        _log.fine("[remove] Removing DB entry: $k");
        await store.delete(k);
      }
    });
  }

  /// Unsupported: file contents are not stored locally
  @override
  getBinary(Account account, File f) {
    _log.info("[getBinary] ${f.path}");
    throw UnimplementedError();
  }

  @override
  putBinary(Account account, String path, Uint8List content) async {
    _log.info("[putBinary] $path");
    // do nothing, we currently don't store file contents locally
  }

  /// Replace the metadata of [f] inside the cached listing of its parent dir
  ///
  /// The whole parent dir listing is read, patched, and written back
  @override
  updateMetadata(Account account, File f, Metadata metadata) {
    _log.info("[updateMetadata] ${f.path}");
    return AppDb.use((db) async {
      final transaction = db.transaction(AppDb.fileStoreName, idbModeReadWrite);
      final store = transaction.objectStore(AppDb.fileStoreName);
      final parentDir = File(path: path.dirname(f.path));
      final parentList = await _doList(store, account, parentDir);
      final jsonList = parentList.map((e) {
        if (e.path == f.path) {
          return e.copyWith(metadata: metadata);
        } else {
          return e;
        }
      });
      await _cacheListResults(store, account, parentDir, jsonList);
    });
  }

  /// Read all cached chunks for [f] and concatenate their file lists
  ///
  /// Throws [CacheNotFoundException] when nothing is cached for [f]
  Future<List<File>> _doList(ObjectStore store, Account account, File f) async {
    final index = store.index(AppDbFileEntry.indexName);
    final path = AppDbFileEntry.toPath(account, f);
    final range = KeyRange.bound([path, 0], [path, int_util.int32Max]);
    final List results = await index.getAll(range);
    if (results?.isNotEmpty == true) {
      final entries = results
          .map((e) => AppDbFileEntry.fromJson(e.cast<String, dynamic>()));
      // flatten the per-chunk lists back into a single listing
      return entries.map((e) {
        _log.info("[_doList] ${e.path}[${e.index}]");
        return e.data;
      }).reduce((value, element) => value + element);
    } else {
      throw CacheNotFoundException("No entry: $path");
    }
  }

  static final _log = Logger("entity.file.FileAppDbDataSource");
}
|
|
|
|
|
|
|
|
/// [FileDataSource] that serves listings from the local [AppDb] cache when
/// it is up to date, falling back to (and refreshing from) the remote
/// WebDAV server otherwise
class FileCachedDataSource implements FileDataSource {
  /// List files under [f], preferring the cached copy
  ///
  /// The cache is considered good when the cached dir's etag matches the
  /// remote one (queried with depth 0 when [f] carries no etag). On a remote
  /// 404 the cached entries are purged and an empty list is returned
  @override
  list(Account account, File f) async {
    final trimmedRootPath = f.path.trimAny("/");
    List<File> cache;
    try {
      cache = await _appDbSrc.list(account, f);
      // compare the cached root
      final cacheEtag = cache
          .firstWhere((element) => element.path.trimAny("/") == trimmedRootPath)
          .etag;
      if (cacheEtag != null) {
        // compare the etag to see if the content has been updated
        var remoteEtag = f.etag;
        if (remoteEtag == null) {
          // no etag supplied, we need to query it form remote
          final remote = await _remoteSrc.list(account, f, depth: 0);
          assert(remote.length == 1);
          remoteEtag = remote.first.etag;
        }
        if (cacheEtag == remoteEtag) {
          // cache is good
          _log.fine(
              "[list] etag matched for ${AppDbFileEntry.toPath(account, f)}");
          return cache;
        }
      }
      _log.info(
          "[list] Remote content updated for ${AppDbFileEntry.toPath(account, f)}");
    } on CacheNotFoundException catch (_) {
      // normal when there's no cache
    } catch (e, stacktrace) {
      // cache read failure must not break listing; fall through to remote
      _log.shout("[list] Cache failure", e, stacktrace);
    }

    // no cache
    try {
      final remote = await _remoteSrc.list(account, f);
      await _cacheResult(account, f, remote);
      if (cache != null) {
        try {
          await _cleanUpCachedDir(account, remote, cache);
        } catch (e, stacktrace) {
          _log.shout("[list] Failed while _cleanUpCachedList", e, stacktrace);
          // ignore error
        }
      }
      return remote;
    } on ApiException catch (e) {
      if (e.response.statusCode == 404) {
        _log.info("[list] File removed: $f");
        // fix: this future was previously dropped un-awaited, so DB errors
        // were silently lost and the purge raced the return
        await _appDbSrc.remove(account, f);
        return [];
      } else {
        rethrow;
      }
    }
  }

  /// Remove [f] from the cache, then from the server
  @override
  remove(Account account, File f) async {
    await _appDbSrc.remove(account, f);
    await _remoteSrc.remove(account, f);
  }

  /// Content is never cached; always read from remote
  @override
  getBinary(Account account, File f) {
    return _remoteSrc.getBinary(account, f);
  }

  @override
  putBinary(Account account, String path, Uint8List content) async {
    await _remoteSrc.putBinary(account, path, content);
  }

  /// Update metadata remotely first, then mirror it into the cache
  @override
  updateMetadata(Account account, File f, Metadata metadata) async {
    // sequential on purpose: only cache the metadata after the server
    // accepted it (previously written as await + .then, same ordering)
    await _remoteSrc.updateMetadata(account, f, metadata);
    await _appDbSrc.updateMetadata(account, f, metadata);
  }

  /// Persist the fresh remote listing [result] of dir [f] into the cache
  Future<void> _cacheResult(Account account, File f, List<File> result) {
    return AppDb.use((db) async {
      final transaction = db.transaction(AppDb.fileStoreName, idbModeReadWrite);
      final store = transaction.objectStore(AppDb.fileStoreName);
      await _cacheListResults(store, account, f, result);
    });
  }

  /// Remove dangling dir entries in the file object store
  ///
  /// A cached dir is dangling when it no longer appears in [remoteResults];
  /// its own entries and all entries of its children are deleted
  Future<void> _cleanUpCachedDir(
      Account account, List<File> remoteResults, List<File> cachedResults) {
    final removed = cachedResults
        .where((cache) =>
            !remoteResults.any((remote) => remote.path == cache.path))
        .toList();
    if (removed.isEmpty) {
      return Future.delayed(Duration.zero);
    }
    return AppDb.use((db) async {
      final transaction = db.transaction(AppDb.fileStoreName, idbModeReadWrite);
      final store = transaction.objectStore(AppDb.fileStoreName);
      final index = store.index(AppDbFileEntry.indexName);
      for (final r in removed) {
        final path = AppDbFileEntry.toPath(account, r);
        final keys = [];
        // delete the dir itself
        final dirRange = KeyRange.bound([path, 0], [path, int_util.int32Max]);
        // delete with KeyRange is not supported in idb_shim/idb_sqflite
        // await store.delete(dirRange);
        keys.addAll(await index
            .openKeyCursor(range: dirRange, autoAdvance: true)
            .map((cursor) => cursor.primaryKey)
            .toList());
        // then its children
        final childrenRange =
            KeyRange.bound(["$path/", 0], ["$path/\uffff", int_util.int32Max]);
        keys.addAll(await index
            .openKeyCursor(range: childrenRange, autoAdvance: true)
            .map((cursor) => cursor.primaryKey)
            .toList());

        for (final k in keys) {
          _log.fine("[_cleanUpCachedDir] Removing DB entry: $k");
          await store.delete(k);
        }
      }
    });
  }

  final _remoteSrc = FileWebdavDataSource();
  final _appDbSrc = FileAppDbDataSource();

  static final _log = Logger("entity.file.FileCachedDataSource");
}
|
|
|
|
|
2021-04-26 12:59:08 +02:00
|
|
|
/// Persist [results], the listing of dir [f], into the file object store
///
/// The listing is split into chunks sized by [AppDbFileEntry.maxDataSize],
/// each stored under the key (path, chunk index). Stale chunks left over
/// from a previously longer listing are removed afterwards
Future<void> _cacheListResults(
    ObjectStore store, Account account, File f, Iterable<File> results) async {
  final index = store.index(AppDbFileEntry.indexName);
  final path = AppDbFileEntry.toPath(account, f);
  final range = KeyRange.bound([path, 0], [path, int_util.int32Max]);
  // count number of entries for this dir
  final count = await index.count(range);
  int newCount = 0;
  for (final pair
      in partition(results, AppDbFileEntry.maxDataSize).withIndex()) {
    // pair.item1 is the chunk index, pair.item2 the files in the chunk
    _log.info(
        "[_cacheListResults] Caching $path[${pair.item1}], length: ${pair.item2.length}");
    await store.put(
      AppDbFileEntry(path, pair.item1, pair.item2).toJson(),
      AppDbFileEntry.toPrimaryKey(account, f, pair.item1),
    );
    ++newCount;
  }
  if (count > newCount) {
    // the old listing had more chunks; remove the extra ones
    // index is 0-based
    final rmRange = KeyRange.bound([path, newCount], [path, int_util.int32Max]);
    final rmKeys = await index
        .openKeyCursor(range: rmRange, autoAdvance: true)
        .map((cursor) => cursor.primaryKey)
        .toList();
    for (final k in rmKeys) {
      _log.fine("[_cacheListResults] Removing DB entry: $k");
      await store.delete(k);
    }
  }
}
|
|
|
|
|
|
|
|
// logger shared by the top-level functions of this library
final _log = Logger("entity.file");
|