Add sync-metadata mapping between local and remote files

This commit is contained in:
fyears 2021-10-25 00:41:13 +08:00
parent 5ef032523a
commit 69bc1f0e03
5 changed files with 234 additions and 51 deletions

View File

@ -1,10 +1,13 @@
import * as lf from "lovefield-ts/dist/es6/lf.js";
import { TAbstractFile, TFile, TFolder } from "obsidian";
import type { SUPPORTED_SERVICES_TYPE } from "./misc";
export type DatabaseConnection = lf.DatabaseConnection;
export const DEFAULT_DB_NAME = "saveremotedb";
export const DEFAULT_TBL_DELETE_HISTORY = "filefolderoperationhistory";
export const DEFAULT_TBL_SYNC_MAPPING = "syncmetadatahistory";
export interface FileFolderHistoryRecord {
key: string;
@ -17,7 +20,19 @@ export interface FileFolderHistoryRecord {
rename_to: string;
}
export const prepareDB = async () => {
// One row of the sync-metadata mapping table: links a local vault item
// to its remote (cloud) counterpart, with size/mtime captured on both
// sides so later syncs can detect which side changed.
export interface SyncMetaMappingRecord {
  // Path of the item inside the local vault.
  local_key: string;
  // Object key on the remote service.
  remote_key: string;
  local_size: number;
  remote_size: number;
  // Modification times in epoch milliseconds (Date.valueOf()).
  local_mtime: number;
  remote_mtime: number;
  // Service-specific extra identifier (e.g. the ETag for S3).
  remote_extra_key: string;
  // Which remote backend this mapping belongs to ("s3" | "webdav" | "ftp").
  remote_type: SUPPORTED_SERVICES_TYPE;
  // Folder keys conventionally end with "/".
  key_type: "folder" | "file";
}
export const prepareDBs = async () => {
const schemaBuilder = lf.schema.create(DEFAULT_DB_NAME, 1);
schemaBuilder
.createTable(DEFAULT_TBL_DELETE_HISTORY)
@ -31,6 +46,28 @@ export const prepareDB = async () => {
.addColumn("key_type", lf.Type.STRING)
.addPrimaryKey(["id"], true)
.addIndex("idxKey", ["key"]);
schemaBuilder
.createTable(DEFAULT_TBL_SYNC_MAPPING)
.addColumn("id", lf.Type.INTEGER)
.addColumn("local_key", lf.Type.STRING)
.addColumn("remote_key", lf.Type.STRING)
.addColumn("local_size", lf.Type.INTEGER)
.addColumn("remote_size", lf.Type.INTEGER)
.addColumn("local_mtime", lf.Type.INTEGER)
.addColumn("remote_mtime", lf.Type.INTEGER)
.addColumn("key_type", lf.Type.STRING)
.addColumn("remote_extra_key", lf.Type.STRING)
.addColumn("remote_type", lf.Type.STRING)
.addNullable([
"remote_extra_key",
"remote_mtime",
"remote_size",
"local_mtime",
])
.addPrimaryKey(["id"], true)
.addIndex("idxkey", ["local_key", "remote_key"]);
const db = await schemaBuilder.connect({
storeType: lf.DataStoreType.INDEXED_DB,
});
@ -38,7 +75,7 @@ export const prepareDB = async () => {
return db;
};
export const destroyDB = async (db: lf.DatabaseConnection) => {
export const destroyDBs = async (db: lf.DatabaseConnection) => {
db.close();
const req = indexedDB.deleteDatabase(DEFAULT_DB_NAME);
req.onsuccess = (event) => {
@ -53,7 +90,9 @@ export const destroyDB = async (db: lf.DatabaseConnection) => {
};
};
export const loadHistoryTable = async (db: lf.DatabaseConnection) => {
export const loadDeleteRenameHistoryTable = async (
db: lf.DatabaseConnection
) => {
const schema = db.getSchema().table(DEFAULT_TBL_DELETE_HISTORY);
const tbl = db.getSchema().table(DEFAULT_TBL_DELETE_HISTORY);
@ -66,7 +105,7 @@ export const loadHistoryTable = async (db: lf.DatabaseConnection) => {
return records as FileFolderHistoryRecord[];
};
export const clearHistoryOfKey = async (
export const clearDeleteRenameHistoryOfKey = async (
db: lf.DatabaseConnection,
key: string
) => {
@ -155,9 +194,67 @@ export const insertRenameRecord = async (
await db.insertOrReplace().into(tbl).values([row]).exec();
};
export const getAllRecords = async (db: lf.DatabaseConnection) => {
export const getAllDeleteRenameRecords = async (db: lf.DatabaseConnection) => {
const schema = db.getSchema().table(DEFAULT_TBL_DELETE_HISTORY);
const res1 = await db.select().from(schema).exec();
const res2 = res1 as FileFolderHistoryRecord[];
return res2;
};
/**
 * Insert or replace the S3 sync-metadata mapping row for a single item.
 *
 * Records the local and remote key/size/mtime pair plus the S3 ETag so a
 * later listing can be mapped back to local state.
 *
 * @param db open lovefield database connection
 * @param remoteExtraKey the ETag returned by S3 for this object
 */
export const upsertSyncMetaMappingDataS3 = async (
  db: lf.DatabaseConnection,
  localKey: string,
  localMTime: number,
  localSize: number,
  remoteKey: string,
  remoteMTime: number,
  remoteSize: number,
  remoteExtraKey: string /* ETag from s3 */
) => {
  // Keys ending with "/" denote folders by convention.
  const keyType: "folder" | "file" = localKey.endsWith("/")
    ? "folder"
    : "file";
  const record: SyncMetaMappingRecord = {
    local_key: localKey,
    local_mtime: localMTime,
    local_size: localSize,
    remote_key: remoteKey,
    remote_mtime: remoteMTime,
    remote_size: remoteSize,
    remote_extra_key: remoteExtraKey,
    remote_type: "s3",
    key_type: keyType,
  };
  const tbl = db.getSchema().table(DEFAULT_TBL_SYNC_MAPPING);
  const row = tbl.createRow(record);
  await db.insertOrReplace().into(tbl).values([row]).exec();
};
/**
 * Look up the sync-metadata mapping record for a remote S3 object.
 *
 * Matches on the remote key, remote mtime, and ETag ("extra key"),
 * restricted to rows written for the "s3" backend.
 *
 * @param db open lovefield database connection
 * @param remoteKey object key as listed on S3
 * @param remoteMTime remote modification time in epoch milliseconds
 * @param remoteExtraKey the S3 ETag of the object
 * @returns the single matching record, or undefined when none exists
 * @throws Error when more than one row matches (table is inconsistent)
 */
export const getSyncMetaMappingByRemoteKeyS3 = async (
  db: lf.DatabaseConnection,
  remoteKey: string,
  remoteMTime: number,
  remoteExtraKey: string
) => {
  const tbl = db.getSchema().table(DEFAULT_TBL_SYNC_MAPPING);
  const res = (await db
    .select()
    .from(tbl)
    .where(
      lf.op.and(
        tbl.col("remote_key").eq(remoteKey),
        tbl.col("remote_mtime").eq(remoteMTime),
        tbl.col("remote_extra_key").eq(remoteExtraKey),
        tbl.col("remote_type").eq("s3")
      )
    )
    .exec()) as SyncMetaMappingRecord[];
  if (res.length === 0) {
    return undefined;
  }
  if (res.length === 1) {
    return res[0];
  }
  // Multiple rows for one (key, mtime, etag) triple means the mapping
  // table has duplicated entries — surface it loudly instead of guessing.
  throw new Error(
    `something bad in sync meta mapping: ${res.length} records for remote key ${remoteKey}`
  );
};

View File

@ -13,12 +13,12 @@ import {
import * as CodeMirror from "codemirror";
import type { DatabaseConnection } from "./localdb";
import {
prepareDB,
destroyDB,
loadHistoryTable,
prepareDBs,
destroyDBs,
loadDeleteRenameHistoryTable,
insertDeleteRecord,
insertRenameRecord,
getAllRecords,
getAllDeleteRenameRecords,
} from "./localdb";
import type { SyncStatusType } from "./sync";
@ -78,15 +78,16 @@ export default class SaveRemotePlugin extends Plugin {
const s3Client = getS3Client(this.settings.s3);
const remoteRsp = await listFromRemote(s3Client, this.settings.s3);
const local = this.app.vault.getAllLoadedFiles();
const localHistory = await loadHistoryTable(this.db);
const localHistory = await loadDeleteRenameHistoryTable(this.db);
// console.log(remoteRsp);
// console.log(local);
// console.log(localHistory);
const mixedStates = ensembleMixedStates(
const mixedStates = await ensembleMixedStates(
remoteRsp.Contents,
local,
localHistory
localHistory,
this.db
);
for (const [key, val] of Object.entries(mixedStates)) {
@ -100,7 +101,7 @@ export default class SaveRemotePlugin extends Plugin {
new Notice("Save Remote Sync data exchanging!");
doActualSync(
await doActualSync(
s3Client,
this.settings.s3,
this.db,
@ -130,7 +131,7 @@ export default class SaveRemotePlugin extends Plugin {
onunload() {
console.log("unloading plugin obsidian-save-remote");
this.destroyDB();
this.destroyDBs();
}
async loadSettings() {
@ -142,11 +143,11 @@ export default class SaveRemotePlugin extends Plugin {
}
async prepareDB() {
this.db = await prepareDB();
this.db = await prepareDBs();
}
destroyDB() {
destroyDB(this.db);
destroyDBs() {
destroyDBs(this.db);
}
}

View File

@ -1,6 +1,8 @@
import { Vault } from "obsidian";
import * as path from "path";
export type SUPPORTED_SERVICES_TYPE = "s3" | "webdav" | "ftp";
export const ignoreHiddenFiles = (item: string) => {
const basename = path.basename(item);
return basename === "." || basename[0] !== ".";

View File

@ -9,6 +9,7 @@ import {
PutObjectCommand,
GetObjectCommand,
DeleteObjectCommand,
HeadObjectCommand,
} from "@aws-sdk/client-s3";
import type { _Object } from "@aws-sdk/client-s3";
@ -46,6 +47,19 @@ export const getS3Client = (s3Config: S3Config) => {
return s3Client;
};
/**
 * Fetch the metadata of a single remote object via an S3 HEAD request
 * (no body is transferred).
 *
 * @param s3Client configured S3 client
 * @param s3Config settings holding the bucket name
 * @param fileOrFolderPath object key to query
 * @returns the HeadObject response (size, mtime, ETag, …)
 */
export const getRemoteMeta = async (
  s3Client: S3Client,
  s3Config: S3Config,
  fileOrFolderPath: string
) => {
  const cmd = new HeadObjectCommand({
    Bucket: s3Config.s3BucketName,
    Key: fileOrFolderPath,
  });
  return await s3Client.send(cmd);
};
export const uploadToRemote = async (
s3Client: S3Client,
s3Config: S3Config,
@ -62,7 +76,7 @@ export const uploadToRemote = async (
} else if (isFolder && !isRecursively) {
// folder
const contentType = DEFAULT_CONTENT_TYPE;
return await s3Client.send(
await s3Client.send(
new PutObjectCommand({
Bucket: s3Config.s3BucketName,
Key: fileOrFolderPath,
@ -70,6 +84,7 @@ export const uploadToRemote = async (
ContentType: contentType,
})
);
return await getRemoteMeta(s3Client, s3Config, fileOrFolderPath);
} else {
// file
// we ignore isRecursively parameter here
@ -78,7 +93,7 @@ export const uploadToRemote = async (
DEFAULT_CONTENT_TYPE;
const content = await vault.adapter.readBinary(fileOrFolderPath);
const body = Buffer.from(content);
return await s3Client.send(
await s3Client.send(
new PutObjectCommand({
Bucket: s3Config.s3BucketName,
Key: fileOrFolderPath,
@ -86,6 +101,7 @@ export const uploadToRemote = async (
ContentType: contentType,
})
);
return await getRemoteMeta(s3Client, s3Config, fileOrFolderPath);
}
};

View File

@ -1,10 +1,21 @@
import { TAbstractFile, TFolder, TFile, Vault } from "obsidian";
import { S3Client } from "@aws-sdk/client-s3";
import * as lf from "lovefield-ts/dist/es6/lf.js";
import { clearHistoryOfKey, FileFolderHistoryRecord } from "./localdb";
import { S3Config, S3ObjectType, uploadToRemote, deleteFromRemote } from "./s3";
import { downloadFromRemote } from "./s3";
import {
clearDeleteRenameHistoryOfKey,
FileFolderHistoryRecord,
upsertSyncMetaMappingDataS3,
getSyncMetaMappingByRemoteKeyS3,
} from "./localdb";
import {
S3Config,
S3ObjectType,
uploadToRemote,
deleteFromRemote,
downloadFromRemote,
} from "./s3";
import { mkdirpInVault } from "./misc";
type DecisionType =
@ -32,24 +43,43 @@ interface FileOrFolderMixedState {
size_remote?: number;
decision?: DecisionType;
syncDone?: "done";
decision_branch?: number;
}
export const ensembleMixedStates = (
export const ensembleMixedStates = async (
remote: S3ObjectType[],
local: TAbstractFile[],
deleteHistory: FileFolderHistoryRecord[]
deleteHistory: FileFolderHistoryRecord[],
db: lf.DatabaseConnection
) => {
const results = {} as Record<string, FileOrFolderMixedState>;
remote.forEach((entry) => {
for (const entry of remote) {
const backwardMapping = await getSyncMetaMappingByRemoteKeyS3(
db,
entry.Key,
entry.LastModified.valueOf(),
entry.ETag
);
let key = entry.Key;
let r = {} as FileOrFolderMixedState;
const key = entry.Key;
if (backwardMapping !== undefined) {
key = backwardMapping.local_key;
r = {
key: key,
exist_remote: true,
mtime_remote: backwardMapping.local_mtime,
size_remote: backwardMapping.local_size,
};
} else {
r = {
key: key,
exist_remote: true,
mtime_remote: entry.LastModified.valueOf(),
size_remote: entry.Size,
};
}
if (results.hasOwnProperty(key)) {
results[key].key = r.key;
results[key].exist_remote = r.exist_remote;
@ -58,15 +88,15 @@ export const ensembleMixedStates = (
} else {
results[key] = r;
}
});
}
local.forEach((entry) => {
for (const entry of local) {
let r = {} as FileOrFolderMixedState;
let key = entry.path;
if (entry.path === "/") {
// ignore
return;
continue;
} else if (entry instanceof TFile) {
r = {
key: entry.path,
@ -94,9 +124,9 @@ export const ensembleMixedStates = (
} else {
results[key] = r;
}
});
}
deleteHistory.forEach((entry) => {
for (const entry of deleteHistory) {
let key = entry.key;
if (entry.key_type === "folder") {
if (!entry.key.endsWith("/")) {
@ -119,7 +149,7 @@ export const ensembleMixedStates = (
} else {
results[key] = r;
}
});
}
return results;
};
@ -158,6 +188,7 @@ export const getOperation = (
r.mtime_remote > r.mtime_local
) {
r.decision = "download_clearhist";
r.decision_branch = 1;
} else if (
r.exist_remote &&
r.exist_local &&
@ -166,6 +197,7 @@ export const getOperation = (
r.mtime_remote < r.mtime_local
) {
r.decision = "upload_clearhist";
r.decision_branch = 2;
} else if (
r.exist_remote &&
r.exist_local &&
@ -175,26 +207,24 @@ export const getOperation = (
r.size_local === r.size_remote
) {
r.decision = "skip";
r.decision_branch = 3;
} else if (
r.exist_remote &&
r.exist_local &&
r.mtime_remote !== undefined &&
r.mtime_local !== undefined &&
r.mtime_remote === r.mtime_local &&
r.size_local === r.size_remote
r.size_local !== r.size_remote
) {
r.decision = "upload_clearhist";
} else if (
r.exist_remote &&
r.exist_local &&
r.mtime_remote !== undefined &&
r.mtime_local === undefined
) {
r.decision_branch = 4;
} else if (r.exist_remote && r.exist_local && r.mtime_local === undefined) {
// this must be a folder!
if (!r.key.endsWith("/")) {
throw Error(`${r.key} is not a folder but lacks local mtime`);
}
r.decision = "skip";
r.decision_branch = 5;
} else if (
r.exist_remote &&
!r.exist_local &&
@ -204,6 +234,7 @@ export const getOperation = (
r.mtime_remote >= r.delete_time_local
) {
r.decision = "download_clearhist";
r.decision_branch = 6;
} else if (
r.exist_remote &&
!r.exist_local &&
@ -213,6 +244,7 @@ export const getOperation = (
r.mtime_remote < r.delete_time_local
) {
r.decision = "delremote_clearhist";
r.decision_branch = 7;
} else if (
r.exist_remote &&
!r.exist_local &&
@ -221,8 +253,10 @@ export const getOperation = (
r.delete_time_local == undefined
) {
r.decision = "download";
r.decision_branch = 8;
} else if (!r.exist_remote && r.exist_local && r.mtime_remote === undefined) {
r.decision = "upload_clearhist";
r.decision_branch = 9;
} else if (
!r.exist_remote &&
!r.exist_local &&
@ -230,6 +264,7 @@ export const getOperation = (
r.mtime_local === undefined
) {
r.decision = "clearhist";
r.decision_branch = 10;
}
return r;
@ -264,10 +299,26 @@ export const doActualSync = async (
vault,
state.mtime_remote
);
await clearHistoryOfKey(db, state.key);
await clearDeleteRenameHistoryOfKey(db, state.key);
} else if (state.decision === "upload_clearhist") {
await uploadToRemote(s3Client, s3Config, state.key, vault, false);
await clearHistoryOfKey(db, state.key);
const remoteObjMeta = await uploadToRemote(
s3Client,
s3Config,
state.key,
vault,
false
);
await upsertSyncMetaMappingDataS3(
db,
state.key,
state.mtime_local,
state.size_local,
state.key,
remoteObjMeta.LastModified.valueOf(),
remoteObjMeta.ContentLength,
remoteObjMeta.ETag
);
await clearDeleteRenameHistoryOfKey(db, state.key);
} else if (state.decision === "download") {
await mkdirpInVault(state.key, vault);
await downloadFromRemote(
@ -279,11 +330,27 @@ export const doActualSync = async (
);
} else if (state.decision === "delremote_clearhist") {
await deleteFromRemote(s3Client, s3Config, state.key);
await clearHistoryOfKey(db, state.key);
await clearDeleteRenameHistoryOfKey(db, state.key);
} else if (state.decision === "upload") {
await uploadToRemote(s3Client, s3Config, state.key, vault, false);
const remoteObjMeta = await uploadToRemote(
s3Client,
s3Config,
state.key,
vault,
false
);
await upsertSyncMetaMappingDataS3(
db,
state.key,
state.mtime_local,
state.size_local,
state.key,
remoteObjMeta.LastModified.valueOf(),
remoteObjMeta.ContentLength,
remoteObjMeta.ETag
);
} else if (state.decision === "clearhist") {
await clearHistoryOfKey(db, state.key);
await clearDeleteRenameHistoryOfKey(db, state.key);
} else {
throw Error("this should never happen!");
}