add mapping of local and remote files

fyears 2021-10-25 00:41:13 +08:00
parent 5ef032523a
commit 69bc1f0e03
5 changed files with 234 additions and 51 deletions

View File

@@ -1,10 +1,13 @@
 import * as lf from "lovefield-ts/dist/es6/lf.js";
 import { TAbstractFile, TFile, TFolder } from "obsidian";
+import type { SUPPORTED_SERVICES_TYPE } from "./misc";
 export type DatabaseConnection = lf.DatabaseConnection;
 export const DEFAULT_DB_NAME = "saveremotedb";
 export const DEFAULT_TBL_DELETE_HISTORY = "filefolderoperationhistory";
+export const DEFAULT_TBL_SYNC_MAPPING = "syncmetadatahistory";
 export interface FileFolderHistoryRecord {
   key: string;
@@ -17,7 +20,19 @@ export interface FileFolderHistoryRecord {
   rename_to: string;
 }
-export const prepareDB = async () => {
+export interface SyncMetaMappingRecord {
+  local_key: string;
+  remote_key: string;
+  local_size: number;
+  remote_size: number;
+  local_mtime: number;
+  remote_mtime: number;
+  remote_extra_key: string;
+  remote_type: SUPPORTED_SERVICES_TYPE;
+  key_type: "folder" | "file";
+}
+export const prepareDBs = async () => {
   const schemaBuilder = lf.schema.create(DEFAULT_DB_NAME, 1);
   schemaBuilder
     .createTable(DEFAULT_TBL_DELETE_HISTORY)
@@ -31,6 +46,28 @@ export const prepareDB = async () => {
     .addColumn("key_type", lf.Type.STRING)
     .addPrimaryKey(["id"], true)
     .addIndex("idxKey", ["key"]);
+  schemaBuilder
+    .createTable(DEFAULT_TBL_SYNC_MAPPING)
+    .addColumn("id", lf.Type.INTEGER)
+    .addColumn("local_key", lf.Type.STRING)
+    .addColumn("remote_key", lf.Type.STRING)
+    .addColumn("local_size", lf.Type.INTEGER)
+    .addColumn("remote_size", lf.Type.INTEGER)
+    .addColumn("local_mtime", lf.Type.INTEGER)
+    .addColumn("remote_mtime", lf.Type.INTEGER)
+    .addColumn("key_type", lf.Type.STRING)
+    .addColumn("remote_extra_key", lf.Type.STRING)
+    .addColumn("remote_type", lf.Type.STRING)
+    .addNullable([
+      "remote_extra_key",
+      "remote_mtime",
+      "remote_size",
+      "local_mtime",
+    ])
+    .addPrimaryKey(["id"], true)
+    .addIndex("idxkey", ["local_key", "remote_key"]);
   const db = await schemaBuilder.connect({
     storeType: lf.DataStoreType.INDEXED_DB,
   });
@@ -38,7 +75,7 @@ export const prepareDB = async () => {
   return db;
 };
-export const destroyDB = async (db: lf.DatabaseConnection) => {
+export const destroyDBs = async (db: lf.DatabaseConnection) => {
   db.close();
   const req = indexedDB.deleteDatabase(DEFAULT_DB_NAME);
   req.onsuccess = (event) => {
@@ -53,7 +90,9 @@ export const destroyDB = async (db: lf.DatabaseConnection) => {
   };
 };
-export const loadHistoryTable = async (db: lf.DatabaseConnection) => {
+export const loadDeleteRenameHistoryTable = async (
+  db: lf.DatabaseConnection
+) => {
   const schema = db.getSchema().table(DEFAULT_TBL_DELETE_HISTORY);
   const tbl = db.getSchema().table(DEFAULT_TBL_DELETE_HISTORY);
@@ -66,7 +105,7 @@ export const loadHistoryTable = async (db: lf.DatabaseConnection) => {
   return records as FileFolderHistoryRecord[];
 };
-export const clearHistoryOfKey = async (
+export const clearDeleteRenameHistoryOfKey = async (
   db: lf.DatabaseConnection,
   key: string
 ) => {
@@ -155,9 +194,67 @@ export const insertRenameRecord = async (
   await db.insertOrReplace().into(tbl).values([row]).exec();
 };
-export const getAllRecords = async (db: lf.DatabaseConnection) => {
+export const getAllDeleteRenameRecords = async (db: lf.DatabaseConnection) => {
   const schema = db.getSchema().table(DEFAULT_TBL_DELETE_HISTORY);
   const res1 = await db.select().from(schema).exec();
   const res2 = res1 as FileFolderHistoryRecord[];
   return res2;
 };
+export const upsertSyncMetaMappingDataS3 = async (
+  db: lf.DatabaseConnection,
+  localKey: string,
+  localMTime: number,
+  localSize: number,
+  remoteKey: string,
+  remoteMTime: number,
+  remoteSize: number,
+  remoteExtraKey: string /* ETag from s3 */
+) => {
+  const schema = db.getSchema().table(DEFAULT_TBL_SYNC_MAPPING);
+  const aggregratedInfo: SyncMetaMappingRecord = {
+    local_key: localKey,
+    local_mtime: localMTime,
+    local_size: localSize,
+    remote_key: remoteKey,
+    remote_mtime: remoteMTime,
+    remote_size: remoteSize,
+    remote_extra_key: remoteExtraKey,
+    remote_type: "s3",
+    key_type: localKey.endsWith("/") ? "folder" : "file",
+  };
+  const row = schema.createRow(aggregratedInfo);
+  await db.insertOrReplace().into(schema).values([row]).exec();
+};
+export const getSyncMetaMappingByRemoteKeyS3 = async (
+  db: lf.DatabaseConnection,
+  remoteKey: string,
+  remoteMTime: number,
+  remoteExtraKey: string
+) => {
+  const schema = db.getSchema().table(DEFAULT_TBL_SYNC_MAPPING);
+  const tbl = db.getSchema().table(DEFAULT_TBL_SYNC_MAPPING);
+  const res = (await db
+    .select()
+    .from(tbl)
+    .where(
+      lf.op.and(
+        tbl.col("remote_key").eq(remoteKey),
+        tbl.col("remote_mtime").eq(remoteMTime),
+        tbl.col("remote_extra_key").eq(remoteExtraKey),
+        tbl.col("remote_type").eq("s3")
+      )
+    )
+    .exec()) as SyncMetaMappingRecord[];
+  if (res.length === 1) {
+    return res[0];
+  }
+  if (res.length === 0) {
+    return undefined;
+  }
+  throw Error("something bad in sync meta mapping!");
+};
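
A minimal usage sketch of the new sync-mapping helpers (not part of the commit; the demo function name, path, timestamps, sizes, and ETag are made-up illustrative values):

import {
  prepareDBs,
  upsertSyncMetaMappingDataS3,
  getSyncMetaMappingByRemoteKeyS3,
} from "./localdb";

// hypothetical demo, not in the repo
const demo = async () => {
  const db = await prepareDBs();
  // After a successful upload, record the local/remote metadata pair.
  await upsertSyncMetaMappingDataS3(
    db,
    "notes/hello.md", // local key
    1635090000000, // local mtime in ms
    123, // local size in bytes
    "notes/hello.md", // remote key (same path in this design)
    1635090001000, // remote mtime reported by S3
    123, // remote size reported by S3
    '"abc123etag"' // ETag from S3, stored as remote_extra_key
  );
  // Later, map a remote listing entry back to the recorded metadata;
  // undefined means no mapping has been stored yet.
  const mapping = await getSyncMetaMappingByRemoteKeyS3(
    db,
    "notes/hello.md",
    1635090001000,
    '"abc123etag"'
  );
  console.log(mapping?.local_mtime);
};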

View File

@@ -13,12 +13,12 @@ import {
 import * as CodeMirror from "codemirror";
 import type { DatabaseConnection } from "./localdb";
 import {
-  prepareDB,
-  destroyDB,
-  loadHistoryTable,
+  prepareDBs,
+  destroyDBs,
+  loadDeleteRenameHistoryTable,
   insertDeleteRecord,
   insertRenameRecord,
-  getAllRecords,
+  getAllDeleteRenameRecords,
 } from "./localdb";
 import type { SyncStatusType } from "./sync";
@@ -78,15 +78,16 @@ export default class SaveRemotePlugin extends Plugin {
       const s3Client = getS3Client(this.settings.s3);
       const remoteRsp = await listFromRemote(s3Client, this.settings.s3);
       const local = this.app.vault.getAllLoadedFiles();
-      const localHistory = await loadHistoryTable(this.db);
+      const localHistory = await loadDeleteRenameHistoryTable(this.db);
       // console.log(remoteRsp);
       // console.log(local);
       // console.log(localHistory);
-      const mixedStates = ensembleMixedStates(
+      const mixedStates = await ensembleMixedStates(
         remoteRsp.Contents,
         local,
-        localHistory
+        localHistory,
+        this.db
       );
       for (const [key, val] of Object.entries(mixedStates)) {
@@ -100,7 +101,7 @@ export default class SaveRemotePlugin extends Plugin {
       new Notice("Save Remote Sync data exchanging!");
-      doActualSync(
+      await doActualSync(
        s3Client,
        this.settings.s3,
        this.db,
@@ -130,7 +131,7 @@ export default class SaveRemotePlugin extends Plugin {
   onunload() {
     console.log("unloading plugin obsidian-save-remote");
-    this.destroyDB();
+    this.destroyDBs();
   }
   async loadSettings() {
@@ -142,11 +143,11 @@ export default class SaveRemotePlugin extends Plugin {
   }
   async prepareDB() {
-    this.db = await prepareDB();
+    this.db = await prepareDBs();
   }
-  destroyDB() {
-    destroyDB(this.db);
+  destroyDBs() {
+    destroyDBs(this.db);
   }
 }

View File

@@ -1,6 +1,8 @@
 import { Vault } from "obsidian";
 import * as path from "path";
+export type SUPPORTED_SERVICES_TYPE = "s3" | "webdav" | "ftp";
 export const ignoreHiddenFiles = (item: string) => {
   const basename = path.basename(item);
   return basename === "." || basename[0] !== ".";
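
A small illustration of how the new SUPPORTED_SERVICES_TYPE union can be consumed (a hypothetical helper, not part of the commit; only "s3" is actually wired up elsewhere in this change):

import type { SUPPORTED_SERVICES_TYPE } from "./misc";

// hypothetical helper for illustration only
const describeService = (t: SUPPORTED_SERVICES_TYPE): string => {
  switch (t) {
    case "s3":
      return "Amazon S3 or an S3-compatible endpoint";
    case "webdav":
      return "WebDAV server";
    case "ftp":
      return "FTP server";
  }
};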

View File

@@ -9,6 +9,7 @@ import {
   PutObjectCommand,
   GetObjectCommand,
   DeleteObjectCommand,
+  HeadObjectCommand,
 } from "@aws-sdk/client-s3";
 import type { _Object } from "@aws-sdk/client-s3";
@@ -46,6 +47,19 @@ export const getS3Client = (s3Config: S3Config) => {
   return s3Client;
 };
+export const getRemoteMeta = async (
+  s3Client: S3Client,
+  s3Config: S3Config,
+  fileOrFolderPath: string
+) => {
+  return await s3Client.send(
+    new HeadObjectCommand({
+      Bucket: s3Config.s3BucketName,
+      Key: fileOrFolderPath,
+    })
+  );
+};
 export const uploadToRemote = async (
   s3Client: S3Client,
   s3Config: S3Config,
@@ -62,7 +76,7 @@ export const uploadToRemote = async (
   } else if (isFolder && !isRecursively) {
     // folder
     const contentType = DEFAULT_CONTENT_TYPE;
-    return await s3Client.send(
+    await s3Client.send(
       new PutObjectCommand({
         Bucket: s3Config.s3BucketName,
         Key: fileOrFolderPath,
@@ -70,6 +84,7 @@ export const uploadToRemote = async (
         ContentType: contentType,
       })
     );
+    return await getRemoteMeta(s3Client, s3Config, fileOrFolderPath);
   } else {
     // file
     // we ignore isRecursively parameter here
@@ -78,7 +93,7 @@ export const uploadToRemote = async (
       DEFAULT_CONTENT_TYPE;
     const content = await vault.adapter.readBinary(fileOrFolderPath);
     const body = Buffer.from(content);
-    return await s3Client.send(
+    await s3Client.send(
       new PutObjectCommand({
         Bucket: s3Config.s3BucketName,
         Key: fileOrFolderPath,
@@ -86,6 +101,7 @@ export const uploadToRemote = async (
         ContentType: contentType,
       })
     );
+    return await getRemoteMeta(s3Client, s3Config, fileOrFolderPath);
   }
 };
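
A rough sketch of how the changed uploadToRemote return value might be consumed (illustrative only; the example function, config object, vault, and path are assumptions, not code from the repo):

import type { Vault } from "obsidian";
import { S3Config, getS3Client, getRemoteMeta, uploadToRemote } from "./s3";

// hypothetical example, not in the repo
const example = async (s3Config: S3Config, vault: Vault) => {
  const s3Client = getS3Client(s3Config);
  // uploadToRemote now finishes with a HeadObject call, so the caller gets
  // the freshly written object's metadata back.
  const meta = await uploadToRemote(s3Client, s3Config, "notes/hello.md", vault, false);
  console.log(meta.LastModified?.valueOf(), meta.ContentLength, meta.ETag);
  // The same metadata can also be fetched on its own.
  const metaAgain = await getRemoteMeta(s3Client, s3Config, "notes/hello.md");
  console.log(metaAgain.ETag);
};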

View File

@@ -1,10 +1,21 @@
 import { TAbstractFile, TFolder, TFile, Vault } from "obsidian";
 import { S3Client } from "@aws-sdk/client-s3";
 import * as lf from "lovefield-ts/dist/es6/lf.js";
-import { clearHistoryOfKey, FileFolderHistoryRecord } from "./localdb";
-import { S3Config, S3ObjectType, uploadToRemote, deleteFromRemote } from "./s3";
-import { downloadFromRemote } from "./s3";
+import {
+  clearDeleteRenameHistoryOfKey,
+  FileFolderHistoryRecord,
+  upsertSyncMetaMappingDataS3,
+  getSyncMetaMappingByRemoteKeyS3,
+} from "./localdb";
+import {
+  S3Config,
+  S3ObjectType,
+  uploadToRemote,
+  deleteFromRemote,
+  downloadFromRemote,
+} from "./s3";
 import { mkdirpInVault } from "./misc";
 type DecisionType =
@@ -32,24 +43,43 @@ interface FileOrFolderMixedState {
   size_remote?: number;
   decision?: DecisionType;
   syncDone?: "done";
+  decision_branch?: number;
 }
-export const ensembleMixedStates = (
+export const ensembleMixedStates = async (
   remote: S3ObjectType[],
   local: TAbstractFile[],
-  deleteHistory: FileFolderHistoryRecord[]
+  deleteHistory: FileFolderHistoryRecord[],
+  db: lf.DatabaseConnection
 ) => {
   const results = {} as Record<string, FileOrFolderMixedState>;
-  remote.forEach((entry) => {
+  for (const entry of remote) {
+    const backwardMapping = await getSyncMetaMappingByRemoteKeyS3(
+      db,
+      entry.Key,
+      entry.LastModified.valueOf(),
+      entry.ETag
+    );
+    let key = entry.Key;
     let r = {} as FileOrFolderMixedState;
-    const key = entry.Key;
-    r = {
-      key: key,
-      exist_remote: true,
-      mtime_remote: entry.LastModified.valueOf(),
-      size_remote: entry.Size,
-    };
+    if (backwardMapping !== undefined) {
+      key = backwardMapping.local_key;
+      r = {
+        key: key,
+        exist_remote: true,
+        mtime_remote: backwardMapping.local_mtime,
+        size_remote: backwardMapping.local_size,
+      };
+    } else {
+      r = {
+        key: key,
+        exist_remote: true,
+        mtime_remote: entry.LastModified.valueOf(),
+        size_remote: entry.Size,
+      };
+    }
     if (results.hasOwnProperty(key)) {
       results[key].key = r.key;
       results[key].exist_remote = r.exist_remote;
@@ -58,15 +88,15 @@ export const ensembleMixedStates = (
     } else {
       results[key] = r;
     }
-  });
-  local.forEach((entry) => {
+  }
+  for (const entry of local) {
     let r = {} as FileOrFolderMixedState;
     let key = entry.path;
     if (entry.path === "/") {
       // ignore
-      return;
+      continue;
     } else if (entry instanceof TFile) {
       r = {
         key: entry.path,
@@ -94,9 +124,9 @@ export const ensembleMixedStates = (
     } else {
       results[key] = r;
     }
-  });
-  deleteHistory.forEach((entry) => {
+  }
+  for (const entry of deleteHistory) {
     let key = entry.key;
     if (entry.key_type === "folder") {
       if (!entry.key.endsWith("/")) {
@@ -119,7 +149,7 @@ export const ensembleMixedStates = (
     } else {
       results[key] = r;
     }
-  });
+  }
   return results;
 };
@@ -158,6 +188,7 @@ export const getOperation = (
     r.mtime_remote > r.mtime_local
   ) {
     r.decision = "download_clearhist";
+    r.decision_branch = 1;
   } else if (
     r.exist_remote &&
     r.exist_local &&
@@ -166,6 +197,7 @@ export const getOperation = (
     r.mtime_remote < r.mtime_local
   ) {
     r.decision = "upload_clearhist";
+    r.decision_branch = 2;
   } else if (
     r.exist_remote &&
     r.exist_local &&
@@ -175,26 +207,24 @@ export const getOperation = (
     r.size_local === r.size_remote
   ) {
     r.decision = "skip";
+    r.decision_branch = 3;
   } else if (
     r.exist_remote &&
     r.exist_local &&
     r.mtime_remote !== undefined &&
     r.mtime_local !== undefined &&
     r.mtime_remote === r.mtime_local &&
-    r.size_local === r.size_remote
+    r.size_local !== r.size_remote
   ) {
     r.decision = "upload_clearhist";
-  } else if (
-    r.exist_remote &&
-    r.exist_local &&
-    r.mtime_remote !== undefined &&
-    r.mtime_local === undefined
-  ) {
+    r.decision_branch = 4;
+  } else if (r.exist_remote && r.exist_local && r.mtime_local === undefined) {
     // this must be a folder!
     if (!r.key.endsWith("/")) {
       throw Error(`${r.key} is not a folder but lacks local mtime`);
     }
     r.decision = "skip";
+    r.decision_branch = 5;
   } else if (
     r.exist_remote &&
     !r.exist_local &&
@@ -204,6 +234,7 @@ export const getOperation = (
     r.mtime_remote >= r.delete_time_local
   ) {
     r.decision = "download_clearhist";
+    r.decision_branch = 6;
   } else if (
     r.exist_remote &&
     !r.exist_local &&
@@ -213,6 +244,7 @@ export const getOperation = (
     r.mtime_remote < r.delete_time_local
   ) {
     r.decision = "delremote_clearhist";
+    r.decision_branch = 7;
   } else if (
     r.exist_remote &&
     !r.exist_local &&
@@ -221,8 +253,10 @@ export const getOperation = (
     r.delete_time_local == undefined
   ) {
     r.decision = "download";
+    r.decision_branch = 8;
   } else if (!r.exist_remote && r.exist_local && r.mtime_remote === undefined) {
     r.decision = "upload_clearhist";
+    r.decision_branch = 9;
   } else if (
     !r.exist_remote &&
     !r.exist_local &&
@@ -230,6 +264,7 @@ export const getOperation = (
     r.mtime_local === undefined
   ) {
     r.decision = "clearhist";
+    r.decision_branch = 10;
   }
   return r;
@@ -264,10 +299,26 @@ export const doActualSync = async (
         vault,
         state.mtime_remote
       );
-      await clearHistoryOfKey(db, state.key);
+      await clearDeleteRenameHistoryOfKey(db, state.key);
     } else if (state.decision === "upload_clearhist") {
-      await uploadToRemote(s3Client, s3Config, state.key, vault, false);
-      await clearHistoryOfKey(db, state.key);
+      const remoteObjMeta = await uploadToRemote(
+        s3Client,
+        s3Config,
+        state.key,
+        vault,
+        false
+      );
+      await upsertSyncMetaMappingDataS3(
+        db,
+        state.key,
+        state.mtime_local,
+        state.size_local,
+        state.key,
+        remoteObjMeta.LastModified.valueOf(),
+        remoteObjMeta.ContentLength,
+        remoteObjMeta.ETag
+      );
+      await clearDeleteRenameHistoryOfKey(db, state.key);
     } else if (state.decision === "download") {
       await mkdirpInVault(state.key, vault);
       await downloadFromRemote(
@@ -279,11 +330,27 @@ export const doActualSync = async (
      );
     } else if (state.decision === "delremote_clearhist") {
       await deleteFromRemote(s3Client, s3Config, state.key);
-      await clearHistoryOfKey(db, state.key);
+      await clearDeleteRenameHistoryOfKey(db, state.key);
     } else if (state.decision === "upload") {
-      await uploadToRemote(s3Client, s3Config, state.key, vault, false);
+      const remoteObjMeta = await uploadToRemote(
+        s3Client,
+        s3Config,
+        state.key,
+        vault,
+        false
+      );
+      await upsertSyncMetaMappingDataS3(
+        db,
+        state.key,
+        state.mtime_local,
+        state.size_local,
+        state.key,
+        remoteObjMeta.LastModified.valueOf(),
+        remoteObjMeta.ContentLength,
+        remoteObjMeta.ETag
+      );
     } else if (state.decision === "clearhist") {
-      await clearHistoryOfKey(db, state.key);
+      await clearDeleteRenameHistoryOfKey(db, state.key);
     } else {
       throw Error("this should never happen!");
     }
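
A rough end-to-end sketch of how the reworked ensembleMixedStates is driven, mirroring the main.ts changes above (the planSync wrapper is hypothetical; it assumes listFromRemote is exported from ./s3, as it is already used in main.ts, and omits error handling and the follow-up doActualSync call):

import type { Vault } from "obsidian";
import { S3Config, getS3Client, listFromRemote } from "./s3";
import { prepareDBs, loadDeleteRenameHistoryTable } from "./localdb";
import { ensembleMixedStates } from "./sync";

// hypothetical wrapper, not in the repo
const planSync = async (s3Config: S3Config, vault: Vault) => {
  const db = await prepareDBs();
  const s3Client = getS3Client(s3Config);
  const remoteRsp = await listFromRemote(s3Client, s3Config);
  const local = vault.getAllLoadedFiles();
  const localHistory = await loadDeleteRenameHistoryTable(db);
  // ensembleMixedStates is async now, because each remote entry may trigger
  // a lookup in the new sync-mapping table.
  const mixedStates = await ensembleMixedStates(
    remoteRsp.Contents,
    local,
    localHistory,
    db
  );
  for (const [key, val] of Object.entries(mixedStates)) {
    console.log(key, val);
  }
};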